@mcoda/core 0.1.7 → 0.1.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. package/CHANGELOG.md +4 -1
  2. package/README.md +22 -3
  3. package/dist/api/AgentsApi.d.ts +8 -1
  4. package/dist/api/AgentsApi.d.ts.map +1 -1
  5. package/dist/api/AgentsApi.js +70 -0
  6. package/dist/api/QaTasksApi.d.ts.map +1 -1
  7. package/dist/api/QaTasksApi.js +2 -0
  8. package/dist/api/TasksApi.d.ts.map +1 -1
  9. package/dist/api/TasksApi.js +1 -0
  10. package/dist/index.d.ts +4 -0
  11. package/dist/index.d.ts.map +1 -1
  12. package/dist/index.js +4 -0
  13. package/dist/prompts/PdrPrompts.d.ts.map +1 -1
  14. package/dist/prompts/PdrPrompts.js +3 -1
  15. package/dist/prompts/SdsPrompts.d.ts.map +1 -1
  16. package/dist/prompts/SdsPrompts.js +2 -0
  17. package/dist/services/agents/AgentRatingFormula.d.ts +27 -0
  18. package/dist/services/agents/AgentRatingFormula.d.ts.map +1 -0
  19. package/dist/services/agents/AgentRatingFormula.js +45 -0
  20. package/dist/services/agents/AgentRatingService.d.ts +41 -0
  21. package/dist/services/agents/AgentRatingService.d.ts.map +1 -0
  22. package/dist/services/agents/AgentRatingService.js +299 -0
  23. package/dist/services/agents/GatewayAgentService.d.ts +3 -0
  24. package/dist/services/agents/GatewayAgentService.d.ts.map +1 -1
  25. package/dist/services/agents/GatewayAgentService.js +68 -24
  26. package/dist/services/agents/GatewayHandoff.d.ts +7 -0
  27. package/dist/services/agents/GatewayHandoff.d.ts.map +1 -0
  28. package/dist/services/agents/GatewayHandoff.js +108 -0
  29. package/dist/services/backlog/TaskOrderingService.d.ts +1 -0
  30. package/dist/services/backlog/TaskOrderingService.d.ts.map +1 -1
  31. package/dist/services/backlog/TaskOrderingService.js +19 -16
  32. package/dist/services/docs/DocsService.d.ts +11 -1
  33. package/dist/services/docs/DocsService.d.ts.map +1 -1
  34. package/dist/services/docs/DocsService.js +240 -52
  35. package/dist/services/execution/GatewayTrioService.d.ts +133 -0
  36. package/dist/services/execution/GatewayTrioService.d.ts.map +1 -0
  37. package/dist/services/execution/GatewayTrioService.js +1125 -0
  38. package/dist/services/execution/QaFollowupService.d.ts +1 -0
  39. package/dist/services/execution/QaFollowupService.d.ts.map +1 -1
  40. package/dist/services/execution/QaFollowupService.js +1 -0
  41. package/dist/services/execution/QaProfileService.d.ts +6 -0
  42. package/dist/services/execution/QaProfileService.d.ts.map +1 -1
  43. package/dist/services/execution/QaProfileService.js +165 -3
  44. package/dist/services/execution/QaTasksService.d.ts +18 -0
  45. package/dist/services/execution/QaTasksService.d.ts.map +1 -1
  46. package/dist/services/execution/QaTasksService.js +712 -34
  47. package/dist/services/execution/WorkOnTasksService.d.ts +14 -0
  48. package/dist/services/execution/WorkOnTasksService.d.ts.map +1 -1
  49. package/dist/services/execution/WorkOnTasksService.js +1497 -240
  50. package/dist/services/openapi/OpenApiService.d.ts +10 -0
  51. package/dist/services/openapi/OpenApiService.d.ts.map +1 -1
  52. package/dist/services/openapi/OpenApiService.js +66 -10
  53. package/dist/services/planning/CreateTasksService.d.ts +6 -0
  54. package/dist/services/planning/CreateTasksService.d.ts.map +1 -1
  55. package/dist/services/planning/CreateTasksService.js +261 -28
  56. package/dist/services/planning/RefineTasksService.d.ts +5 -0
  57. package/dist/services/planning/RefineTasksService.d.ts.map +1 -1
  58. package/dist/services/planning/RefineTasksService.js +184 -35
  59. package/dist/services/review/CodeReviewService.d.ts +14 -0
  60. package/dist/services/review/CodeReviewService.d.ts.map +1 -1
  61. package/dist/services/review/CodeReviewService.js +657 -61
  62. package/dist/services/shared/ProjectGuidance.d.ts +6 -0
  63. package/dist/services/shared/ProjectGuidance.d.ts.map +1 -0
  64. package/dist/services/shared/ProjectGuidance.js +21 -0
  65. package/dist/services/tasks/TaskCommentFormatter.d.ts +20 -0
  66. package/dist/services/tasks/TaskCommentFormatter.d.ts.map +1 -0
  67. package/dist/services/tasks/TaskCommentFormatter.js +54 -0
  68. package/dist/workspace/WorkspaceManager.d.ts +4 -0
  69. package/dist/workspace/WorkspaceManager.d.ts.map +1 -1
  70. package/dist/workspace/WorkspaceManager.js +3 -0
  71. package/package.json +5 -5
@@ -10,36 +10,541 @@ import { JobService } from "../jobs/JobService.js";
10
10
  import { TaskSelectionService } from "./TaskSelectionService.js";
11
11
  import { TaskStateService } from "./TaskStateService.js";
12
12
  import { RoutingService } from "../agents/RoutingService.js";
13
+ import { AgentRatingService } from "../agents/AgentRatingService.js";
14
+ import { loadProjectGuidance } from "../shared/ProjectGuidance.js";
13
15
  const exec = promisify(execCb);
14
16
  const DEFAULT_BASE_BRANCH = "mcoda-dev";
15
17
  const DEFAULT_TASK_BRANCH_PREFIX = "mcoda/task/";
16
18
  const TASK_LOCK_TTL_SECONDS = 60 * 60;
19
+ const MAX_TEST_FIX_ATTEMPTS = 3;
20
+ const DEFAULT_TEST_OUTPUT_CHARS = 1200;
17
21
  const DEFAULT_CODE_WRITER_PROMPT = [
18
22
  "You are the code-writing agent. Before coding, query docdex with the task key and feature keywords (MCP `docdex_search` limit 4–8 or CLI `docdexd query --repo <repo> --query \"<term>\" --limit 6 --snippets=false`). If results look stale, reindex (`docdex_index` or `docdexd index --repo <repo>`) then re-run search. Fetch snippets via `docdex_open` or `/snippet/:doc_id?text_only=true` only for specific hits.",
19
23
  "Use docdex snippets to ground decisions (data model, offline/online expectations, constraints, acceptance criteria). Note when docdex is unavailable and fall back to local docs.",
20
24
  "Re-use existing store/slices/adapters and tests; avoid inventing new backends or ad-hoc actions. Keep behavior backward-compatible and scoped to the documented contracts.",
21
25
  "If you encounter merge conflicts, resolve them first (clean conflict markers and ensure code compiles) before continuing task work.",
22
- "If a target file does not exist, create it by outputting a FILE block (not a diff): `FILE: path/to/file.ext` followed by a fenced code block containing the full file contents.",
26
+ "If a target file does not exist, create it by outputting a FILE block (not a diff): `FILE: path/to/file.ext` followed by a fenced code block containing the full file contents. Do not respond with JSON-only output; if the runtime forces JSON, include a top-level `patch` string (unified diff) or `files` array of {path, content}.",
23
27
  ].join("\n");
24
28
  const DEFAULT_JOB_PROMPT = "You are an mcoda agent that follows workspace runbooks and responds with actionable, concise output.";
25
29
  const DEFAULT_CHARACTER_PROMPT = "Write clearly, avoid hallucinations, cite assumptions, and prioritize risk mitigation for the user.";
26
30
  const estimateTokens = (text) => Math.max(1, Math.ceil((text ?? "").length / 4));
31
+ const looksLikeUnifiedDiff = (value) => {
32
+ if (/^diff --git /m.test(value) || /\*\*\* Begin Patch/.test(value))
33
+ return true;
34
+ const hasFileHeaders = /^---\s+\S+/m.test(value) && /^\+\+\+\s+\S+/m.test(value);
35
+ const hasHunk = /^@@\s+-\d+/m.test(value);
36
+ return hasFileHeaders && hasHunk;
37
+ };
27
38
  const extractPatches = (output) => {
28
- const matches = [...output.matchAll(/```(?:patch|diff)[\s\S]*?```/g)];
29
- return matches.map((m) => m[0].replace(/```(?:patch|diff)/, "").replace(/```$/, "").trim()).filter(Boolean);
39
+ const patches = new Set();
40
+ const fenceRegex = /```(\w+)?\s*\r?\n([\s\S]*?)\r?\n```/g;
41
+ for (const match of output.matchAll(fenceRegex)) {
42
+ const lang = (match[1] ?? "").toLowerCase();
43
+ const content = (match[2] ?? "").trim();
44
+ if (!content)
45
+ continue;
46
+ if (lang === "patch" || lang === "diff" || looksLikeUnifiedDiff(content)) {
47
+ patches.add(content);
48
+ }
49
+ }
50
+ const unfenced = output.replace(fenceRegex, "");
51
+ for (const match of unfenced.matchAll(/\*\*\* Begin Patch[\s\S]*?\*\*\* End Patch/g)) {
52
+ const content = match[0].trim();
53
+ if (content)
54
+ patches.add(content);
55
+ }
56
+ for (const match of unfenced.matchAll(/^diff --git [\s\S]*?(?=^diff --git |\s*$)/gm)) {
57
+ const content = match[0].trim();
58
+ if (content)
59
+ patches.add(content);
60
+ }
61
+ return Array.from(patches).filter(Boolean);
62
+ };
63
+ const extractPlainCodeFence = (output) => {
64
+ const match = output.match(/```(\w+)?\s*\r?\n([\s\S]*?)\r?\n```/);
65
+ if (!match)
66
+ return null;
67
+ const lang = (match[1] ?? "").toLowerCase();
68
+ if (lang === "patch" || lang === "diff")
69
+ return null;
70
+ const content = (match[2] ?? "").trimEnd();
71
+ return content ? content : null;
72
+ };
73
+ const normalizeFileBlockPath = (value) => {
74
+ const trimmed = value.trim();
75
+ return trimmed.replace(/^[`'"]+|[`'"]+$/g, "");
30
76
  };
31
77
  const extractFileBlocks = (output) => {
32
78
  const files = [];
33
- const regex = /(?:^|\r?\n)FILE:\s*([^\r\n]+)\r?\n```[^\r\n]*\r?\n([\s\S]*?)\r?\n```/g;
79
+ const regex = /(?:^|\r?\n)\s*(?:[-*]\s*)?FILE:\s*([^\r\n]+)\r?\n```[^\r\n]*\r?\n([\s\S]*?)\r?\n```/g;
80
+ const seen = new Set();
34
81
  let match;
35
82
  while ((match = regex.exec(output)) !== null) {
36
- const filePath = match[1]?.trim();
83
+ const filePath = normalizeFileBlockPath(match[1] ?? "");
37
84
  if (!filePath)
38
85
  continue;
39
- files.push({ path: filePath, content: match[2] ?? "" });
86
+ const content = match[2] ?? "";
87
+ const key = `${filePath}::${content.length}`;
88
+ if (!seen.has(key)) {
89
+ files.push({ path: filePath, content });
90
+ seen.add(key);
91
+ }
92
+ }
93
+ if (!files.length) {
94
+ const lines = output.split(/\r?\n/);
95
+ let currentPath = null;
96
+ let buffer = [];
97
+ const flush = () => {
98
+ if (!currentPath)
99
+ return;
100
+ let content = buffer.join("\n");
101
+ const trimmed = content.trim();
102
+ if (trimmed.startsWith("```") && trimmed.endsWith("```")) {
103
+ const contentLines = content.split(/\r?\n/);
104
+ contentLines.shift();
105
+ contentLines.pop();
106
+ content = contentLines.join("\n");
107
+ }
108
+ const key = `${currentPath}::${content.length}`;
109
+ if (!seen.has(key)) {
110
+ files.push({ path: currentPath, content });
111
+ seen.add(key);
112
+ }
113
+ currentPath = null;
114
+ buffer = [];
115
+ };
116
+ for (const line of lines) {
117
+ const fileMatch = line.match(/^\s*(?:[-*]\s*)?FILE:\s*(.+)$/);
118
+ if (fileMatch) {
119
+ flush();
120
+ currentPath = normalizeFileBlockPath(fileMatch[1] ?? "");
121
+ if (!currentPath) {
122
+ currentPath = null;
123
+ }
124
+ buffer = [];
125
+ continue;
126
+ }
127
+ if (currentPath)
128
+ buffer.push(line);
129
+ }
130
+ flush();
40
131
  }
41
132
  return files;
42
133
  };
134
+ const looksLikeJsonOutput = (output) => {
135
+ const trimmed = output.trim();
136
+ if (!trimmed)
137
+ return false;
138
+ if (/```json/i.test(output))
139
+ return true;
140
+ return trimmed.startsWith("{") || trimmed.startsWith("[");
141
+ };
142
+ const parseJsonPayload = (output) => {
143
+ const candidates = [];
144
+ const fenced = output.match(/```json([\s\S]*?)```/i);
145
+ if (fenced?.[1])
146
+ candidates.push(fenced[1]);
147
+ const trimmed = output.trim();
148
+ if (trimmed)
149
+ candidates.push(trimmed);
150
+ if (!trimmed.startsWith("{") && !trimmed.startsWith("[")) {
151
+ const firstBrace = output.indexOf("{");
152
+ const firstBracket = output.indexOf("[");
153
+ const start = firstBrace >= 0 && firstBracket >= 0 ? Math.min(firstBrace, firstBracket) : Math.max(firstBrace, firstBracket);
154
+ const endBrace = output.lastIndexOf("}");
155
+ const endBracket = output.lastIndexOf("]");
156
+ const end = endBrace >= 0 && endBracket >= 0 ? Math.max(endBrace, endBracket) : Math.max(endBrace, endBracket);
157
+ if (start >= 0 && end > start) {
158
+ candidates.push(output.slice(start, end + 1));
159
+ }
160
+ }
161
+ for (const candidate of candidates) {
162
+ const normalized = candidate.trim();
163
+ if (!normalized)
164
+ continue;
165
+ if (!normalized.startsWith("{") && !normalized.startsWith("["))
166
+ continue;
167
+ try {
168
+ return JSON.parse(normalized);
169
+ }
170
+ catch {
171
+ /* try next candidate */
172
+ }
173
+ }
174
+ return null;
175
+ };
176
+ const extractPatchesFromJson = (payload) => {
177
+ const patches = new Set();
178
+ const seen = new Set();
179
+ const addPatchText = (value) => {
180
+ const trimmed = value.trim();
181
+ if (!trimmed)
182
+ return;
183
+ const extracted = extractPatches(trimmed);
184
+ if (extracted.length) {
185
+ extracted.forEach((patch) => patches.add(patch));
186
+ return;
187
+ }
188
+ if (looksLikeUnifiedDiff(trimmed)) {
189
+ patches.add(trimmed);
190
+ }
191
+ };
192
+ const visit = (value) => {
193
+ if (!value || typeof value !== "object") {
194
+ if (typeof value === "string")
195
+ addPatchText(value);
196
+ return;
197
+ }
198
+ if (seen.has(value))
199
+ return;
200
+ seen.add(value);
201
+ if (Array.isArray(value)) {
202
+ value.forEach(visit);
203
+ return;
204
+ }
205
+ const record = value;
206
+ const directKeys = ["patch", "diff", "unified_diff", "unifiedDiff", "patchText", "patches", "diffs"];
207
+ for (const key of directKeys) {
208
+ if (record[key] === undefined)
209
+ continue;
210
+ visit(record[key]);
211
+ }
212
+ Object.values(record).forEach(visit);
213
+ };
214
+ visit(payload);
215
+ return Array.from(patches);
216
+ };
217
+ const extractFileBlocksFromJson = (payload) => {
218
+ const files = new Map();
219
+ const seen = new Set();
220
+ const addFile = (filePath, content) => {
221
+ const normalizedPath = filePath.trim();
222
+ if (!normalizedPath)
223
+ return;
224
+ files.set(normalizedPath, content);
225
+ };
226
+ const visit = (value) => {
227
+ if (!value || typeof value !== "object")
228
+ return;
229
+ if (seen.has(value))
230
+ return;
231
+ seen.add(value);
232
+ if (Array.isArray(value)) {
233
+ value.forEach(visit);
234
+ return;
235
+ }
236
+ const record = value;
237
+ if (typeof record.path === "string" && typeof record.content === "string") {
238
+ addFile(record.path, record.content);
239
+ }
240
+ if (typeof record.file === "string" && typeof record.contents === "string") {
241
+ addFile(record.file, record.contents);
242
+ }
243
+ const fileContainers = ["files", "fileBlocks", "file_blocks", "newFiles", "writeFiles"];
244
+ for (const key of fileContainers) {
245
+ const container = record[key];
246
+ if (!container || typeof container !== "object")
247
+ continue;
248
+ if (Array.isArray(container)) {
249
+ container.forEach(visit);
250
+ continue;
251
+ }
252
+ const entries = Object.entries(container);
253
+ if (entries.length && entries.every(([, val]) => typeof val === "string")) {
254
+ entries.forEach(([filePath, content]) => addFile(filePath, content));
255
+ }
256
+ else {
257
+ visit(container);
258
+ }
259
+ }
260
+ Object.values(record).forEach(visit);
261
+ };
262
+ visit(payload);
263
+ return Array.from(files.entries()).map(([filePath, content]) => ({ path: filePath, content }));
264
+ };
265
+ const extractAgentChanges = (output) => {
266
+ const placeholderRegex = /\?\?\?|rest of existing code/i;
267
+ let patches = extractPatches(output);
268
+ let fileBlocks = extractFileBlocks(output);
269
+ let jsonDetected = false;
270
+ if (patches.length) {
271
+ patches = patches.filter((patch) => !placeholderRegex.test(patch));
272
+ }
273
+ if (patches.length === 0 && fileBlocks.length === 0) {
274
+ const payload = parseJsonPayload(output);
275
+ if (payload) {
276
+ jsonDetected = true;
277
+ patches = extractPatchesFromJson(payload);
278
+ fileBlocks = extractFileBlocksFromJson(payload);
279
+ if (patches.length) {
280
+ patches = patches.filter((patch) => !placeholderRegex.test(patch));
281
+ }
282
+ }
283
+ else if (looksLikeJsonOutput(output)) {
284
+ jsonDetected = true;
285
+ }
286
+ }
287
+ return { patches, fileBlocks, jsonDetected };
288
+ };
289
+ const splitFileBlocksByExistence = (fileBlocks, cwd) => {
290
+ const existing = [];
291
+ const remaining = [];
292
+ for (const block of fileBlocks) {
293
+ const resolved = path.resolve(cwd, block.path);
294
+ if (fs.existsSync(resolved)) {
295
+ existing.push(block.path);
296
+ }
297
+ else {
298
+ remaining.push(block);
299
+ }
300
+ }
301
+ return { existing, remaining };
302
+ };
303
+ const normalizeStringArray = (value) => {
304
+ if (!Array.isArray(value))
305
+ return [];
306
+ return value
307
+ .filter((item) => typeof item === "string")
308
+ .map((item) => item.trim())
309
+ .filter(Boolean);
310
+ };
311
+ const normalizeTestCommands = (value) => {
312
+ if (typeof value === "string") {
313
+ const trimmed = value.trim();
314
+ return trimmed ? [trimmed] : [];
315
+ }
316
+ return normalizeStringArray(value);
317
+ };
318
+ const normalizeTestRequirements = (value) => {
319
+ const raw = value && typeof value === "object" ? value : {};
320
+ return {
321
+ unit: normalizeStringArray(raw.unit),
322
+ component: normalizeStringArray(raw.component),
323
+ integration: normalizeStringArray(raw.integration),
324
+ api: normalizeStringArray(raw.api),
325
+ };
326
+ };
327
+ const hasTestRequirements = (requirements) => requirements.unit.length > 0 ||
328
+ requirements.component.length > 0 ||
329
+ requirements.integration.length > 0 ||
330
+ requirements.api.length > 0;
331
+ const formatTestRequirementsNote = (requirements) => {
332
+ const parts = [];
333
+ if (requirements.unit.length)
334
+ parts.push(`Unit: ${requirements.unit.join("; ")}`);
335
+ if (requirements.component.length)
336
+ parts.push(`Component: ${requirements.component.join("; ")}`);
337
+ if (requirements.integration.length)
338
+ parts.push(`Integration: ${requirements.integration.join("; ")}`);
339
+ if (requirements.api.length)
340
+ parts.push(`API: ${requirements.api.join("; ")}`);
341
+ return parts.length ? `Required tests: ${parts.join(" | ")}` : "";
342
+ };
343
+ const truncateText = (value, maxChars = DEFAULT_TEST_OUTPUT_CHARS) => {
344
+ if (!value)
345
+ return "";
346
+ if (value.length <= maxChars)
347
+ return value;
348
+ return `${value.slice(0, maxChars)}...`;
349
+ };
350
+ const formatTestFailureSummary = (results) => {
351
+ return results
352
+ .map((result) => {
353
+ const stdout = truncateText(result.stdout ?? "");
354
+ const stderr = truncateText(result.stderr ?? "");
355
+ const lines = [
356
+ `Command: ${result.command}`,
357
+ `Exit code: ${result.code}`,
358
+ stdout ? `Stdout: ${stdout}` : undefined,
359
+ stderr ? `Stderr: ${stderr}` : undefined,
360
+ ].filter(Boolean);
361
+ return lines.join("\n");
362
+ })
363
+ .join("\n\n");
364
+ };
365
+ const detectDefaultTestCommand = (workspaceRoot) => {
366
+ const hasPnpm = fs.existsSync(path.join(workspaceRoot, "pnpm-lock.yaml")) ||
367
+ fs.existsSync(path.join(workspaceRoot, "pnpm-workspace.yaml"));
368
+ const hasYarn = fs.existsSync(path.join(workspaceRoot, "yarn.lock"));
369
+ const hasNpmLock = fs.existsSync(path.join(workspaceRoot, "package-lock.json")) ||
370
+ fs.existsSync(path.join(workspaceRoot, "npm-shrinkwrap.json"));
371
+ const hasPackageJson = fs.existsSync(path.join(workspaceRoot, "package.json"));
372
+ if (hasPnpm)
373
+ return "pnpm test";
374
+ if (hasYarn)
375
+ return "yarn test";
376
+ if (hasNpmLock || hasPackageJson)
377
+ return "npm test";
378
+ return undefined;
379
+ };
380
+ const quoteShellPath = (value) => (value.includes(" ") ? `"${value}"` : value);
381
+ const findNearestPackageRoot = (workspaceRoot, filePath) => {
382
+ const resolved = path.isAbsolute(filePath) ? filePath : path.join(workspaceRoot, filePath);
383
+ let current = resolved;
384
+ if (fs.existsSync(resolved)) {
385
+ const stat = fs.statSync(resolved);
386
+ if (stat.isFile()) {
387
+ current = path.dirname(resolved);
388
+ }
389
+ }
390
+ else {
391
+ current = path.dirname(resolved);
392
+ }
393
+ const root = path.resolve(workspaceRoot);
394
+ while (true) {
395
+ if (fs.existsSync(path.join(current, "package.json")))
396
+ return current;
397
+ if (current === root)
398
+ break;
399
+ const parent = path.dirname(current);
400
+ if (parent === current)
401
+ break;
402
+ current = parent;
403
+ }
404
+ return undefined;
405
+ };
406
+ const resolveScopedPackageRoot = (workspaceRoot, files) => {
407
+ if (!files.length)
408
+ return undefined;
409
+ const candidates = files
410
+ .map((file) => findNearestPackageRoot(workspaceRoot, file))
411
+ .filter((value) => Boolean(value))
412
+ .map((value) => path.resolve(value));
413
+ if (!candidates.length)
414
+ return undefined;
415
+ const unique = Array.from(new Set(candidates));
416
+ unique.sort((a, b) => b.length - a.length);
417
+ return unique[0];
418
+ };
419
+ const detectPackageManager = (workspaceRoot, packageRoot) => {
420
+ const hasPnpm = fs.existsSync(path.join(workspaceRoot, "pnpm-lock.yaml")) ||
421
+ fs.existsSync(path.join(workspaceRoot, "pnpm-workspace.yaml")) ||
422
+ fs.existsSync(path.join(packageRoot, "pnpm-lock.yaml"));
423
+ if (hasPnpm)
424
+ return "pnpm";
425
+ const hasYarn = fs.existsSync(path.join(workspaceRoot, "yarn.lock")) || fs.existsSync(path.join(packageRoot, "yarn.lock"));
426
+ if (hasYarn)
427
+ return "yarn";
428
+ const hasNpmLock = fs.existsSync(path.join(workspaceRoot, "package-lock.json")) ||
429
+ fs.existsSync(path.join(workspaceRoot, "npm-shrinkwrap.json")) ||
430
+ fs.existsSync(path.join(packageRoot, "package-lock.json")) ||
431
+ fs.existsSync(path.join(packageRoot, "npm-shrinkwrap.json"));
432
+ const hasPackageJson = fs.existsSync(path.join(packageRoot, "package.json"));
433
+ if (hasNpmLock || hasPackageJson)
434
+ return "npm";
435
+ return undefined;
436
+ };
437
+ const buildScopedTestCommand = (workspaceRoot, packageRoot) => {
438
+ const manager = detectPackageManager(workspaceRoot, packageRoot);
439
+ if (!manager)
440
+ return undefined;
441
+ const relative = path.relative(workspaceRoot, packageRoot).split(path.sep).join("/");
442
+ const target = relative && relative !== "" ? relative : ".";
443
+ const quoted = quoteShellPath(target);
444
+ if (manager === "pnpm")
445
+ return target === "." ? "pnpm test" : `pnpm -C ${quoted} test`;
446
+ if (manager === "yarn")
447
+ return target === "." ? "yarn test" : `yarn --cwd ${quoted} test`;
448
+ if (manager === "npm")
449
+ return target === "." ? "npm test" : `npm --prefix ${quoted} test`;
450
+ return undefined;
451
+ };
452
+ const detectScopedTestCommand = (workspaceRoot, files) => {
453
+ const scopedRoot = resolveScopedPackageRoot(workspaceRoot, files);
454
+ if (scopedRoot) {
455
+ const scopedCommand = buildScopedTestCommand(workspaceRoot, scopedRoot);
456
+ if (scopedCommand)
457
+ return scopedCommand;
458
+ }
459
+ return detectDefaultTestCommand(workspaceRoot);
460
+ };
461
+ const resolveNodeCommand = () => {
462
+ const execPath = process.execPath;
463
+ return execPath.includes(" ") ? `"${execPath}"` : execPath;
464
+ };
465
+ const detectRunAllTestsCommand = (workspaceRoot) => {
466
+ const scriptPath = path.join(workspaceRoot, "tests", "all.js");
467
+ if (!fs.existsSync(scriptPath))
468
+ return undefined;
469
+ const relative = path.relative(workspaceRoot, scriptPath).split(path.sep).join("/");
470
+ return `${resolveNodeCommand()} ${relative}`;
471
+ };
472
+ const pickSeedTestCategory = (requirements) => {
473
+ const order = ["unit", "component", "integration", "api"];
474
+ const active = order.filter((key) => requirements[key].length > 0);
475
+ if (active.length === 1)
476
+ return active[0];
477
+ return "unit";
478
+ };
479
+ const buildRunAllTestsScript = (seedCategory, seedCommands) => {
480
+ const suites = {
481
+ unit: [],
482
+ component: [],
483
+ integration: [],
484
+ api: [],
485
+ };
486
+ if (seedCommands.length) {
487
+ suites[seedCategory] = seedCommands;
488
+ }
489
+ return [
490
+ "#!/usr/bin/env node",
491
+ 'import { spawnSync } from "node:child_process";',
492
+ "",
493
+ "// Register test commands per discipline.",
494
+ `const testSuites = ${JSON.stringify(suites, null, 2)};`,
495
+ "",
496
+ 'const entries = Object.entries(testSuites).flatMap(([label, commands]) =>',
497
+ " commands.map((command) => ({ label, command }))",
498
+ ");",
499
+ "if (!entries.length) {",
500
+ ' console.error("No test commands registered in tests/all.js. Add unit/component/integration/api commands.");',
501
+ " process.exit(1);",
502
+ "}",
503
+ "",
504
+ 'console.log("MCODA_RUN_ALL_TESTS_START");',
505
+ "let failed = false;",
506
+ "for (const entry of entries) {",
507
+ " const result = spawnSync(entry.command, { shell: true, stdio: \"inherit\" });",
508
+ " const status = typeof result.status === \"number\" ? result.status : 1;",
509
+ " if (status !== 0) failed = true;",
510
+ "}",
511
+ 'console.log("MCODA_RUN_ALL_TESTS_END");',
512
+ "process.exit(failed ? 1 : 0);",
513
+ "",
514
+ ].join("\n");
515
+ };
516
+ const ensureRunAllTestsScript = async (workspaceRoot, requirements, seedCommands) => {
517
+ const scriptPath = path.join(workspaceRoot, "tests", "all.js");
518
+ if (fs.existsSync(scriptPath))
519
+ return false;
520
+ await PathHelper.ensureDir(path.dirname(scriptPath));
521
+ const seedCategory = pickSeedTestCategory(requirements);
522
+ const contents = buildRunAllTestsScript(seedCategory, seedCommands);
523
+ await fs.promises.writeFile(scriptPath, contents, "utf8");
524
+ return true;
525
+ };
526
+ const sanitizeTestCommands = (commands, workspaceRoot) => {
527
+ if (!commands.length)
528
+ return { commands, skipped: [] };
529
+ const hasPackageJson = fs.existsSync(path.join(workspaceRoot, "package.json"));
530
+ const skipped = [];
531
+ const sanitized = commands.filter((command) => {
532
+ const trimmed = command.trim();
533
+ if (!trimmed)
534
+ return false;
535
+ const normalized = trimmed.replace(/\s+/g, " ");
536
+ const isPkgManager = /^(npm|yarn|pnpm)\b/i.test(normalized);
537
+ if (!isPkgManager)
538
+ return true;
539
+ const hasExplicitCwd = /\s(--prefix|-C)\s|\s(--prefix|-C)=/i.test(normalized);
540
+ if (!hasPackageJson && !hasExplicitCwd) {
541
+ skipped.push(command);
542
+ return false;
543
+ }
544
+ return true;
545
+ });
546
+ return { commands: sanitized, skipped };
547
+ };
43
548
  const touchedFilesFromPatch = (patch) => {
44
549
  const files = new Set();
45
550
  const regex = /^\+\+\+\s+b\/([^\s]+)/gm;
@@ -50,6 +555,11 @@ const touchedFilesFromPatch = (patch) => {
50
555
  return Array.from(files);
51
556
  };
52
557
  const normalizePaths = (workspaceRoot, files) => files.map((f) => path.relative(workspaceRoot, path.isAbsolute(f) ? f : path.join(workspaceRoot, f))).map((f) => f.replace(/\\/g, "/"));
558
+ const resolveLockTtlSeconds = (maxAgentSeconds) => {
559
+ if (!maxAgentSeconds || maxAgentSeconds <= 0)
560
+ return TASK_LOCK_TTL_SECONDS;
561
+ return Math.max(1, Math.min(TASK_LOCK_TTL_SECONDS, maxAgentSeconds + 60));
562
+ };
53
563
  const MCODA_GITIGNORE_ENTRY = ".mcoda/\n";
54
564
  const WORK_DIR = (jobId, workspaceRoot) => path.join(workspaceRoot, ".mcoda", "jobs", jobId, "work");
55
565
  const maybeConvertApplyPatch = (patch) => {
@@ -158,6 +668,95 @@ const ensureDiffHeader = (patch) => {
158
668
  }
159
669
  return result.join("\n");
160
670
  };
671
+ const normalizeDiffPaths = (patch, workspaceRoot) => {
672
+ const rootEntries = new Set();
673
+ try {
674
+ fs.readdirSync(workspaceRoot, { withFileTypes: true }).forEach((entry) => {
675
+ rootEntries.add(entry.name);
676
+ });
677
+ }
678
+ catch {
679
+ /* ignore */
680
+ }
681
+ const normalizePath = (raw) => {
682
+ let value = raw.trim();
683
+ value = value.replace(/^file:\s*/i, "");
684
+ value = value.replace(/^a\//, "").replace(/^b\//, "");
685
+ if (value === "/dev/null")
686
+ return value;
687
+ const original = value;
688
+ const absolute = path.isAbsolute(original);
689
+ if (absolute) {
690
+ const relative = path.relative(workspaceRoot, original);
691
+ const normalizedRelative = relative.replace(/\\/g, "/");
692
+ if (!normalizedRelative.startsWith("..") && normalizedRelative !== "") {
693
+ return normalizedRelative;
694
+ }
695
+ const segments = original.replace(/\\/g, "/").split("/").filter(Boolean);
696
+ for (let i = 0; i < segments.length; i += 1) {
697
+ if (rootEntries.has(segments[i])) {
698
+ return segments.slice(i).join("/");
699
+ }
700
+ }
701
+ }
702
+ return original.replace(/\\/g, "/");
703
+ };
704
+ return patch
705
+ .split(/\r?\n/)
706
+ .map((line) => {
707
+ if (line.startsWith("diff --git ")) {
708
+ const parts = line.split(" ");
709
+ if (parts.length >= 4) {
710
+ const left = normalizePath(parts[2]);
711
+ const right = normalizePath(parts[3]);
712
+ return `diff --git a/${left} b/${right}`;
713
+ }
714
+ return line;
715
+ }
716
+ if (line.startsWith("--- ")) {
717
+ const rest = line.slice(4).trim();
718
+ if (rest === "/dev/null")
719
+ return line;
720
+ const normalized = normalizePath(rest);
721
+ return `--- a/${normalized}`;
722
+ }
723
+ if (line.startsWith("+++ ")) {
724
+ const rest = line.slice(4).trim();
725
+ if (rest === "/dev/null")
726
+ return line;
727
+ const normalized = normalizePath(rest);
728
+ return `+++ b/${normalized}`;
729
+ }
730
+ return line;
731
+ })
732
+ .join("\n");
733
+ };
734
+ const convertMissingFilePatchToAdd = (patch, workspaceRoot) => {
735
+ if (!/@@\s+-0,0\s+\+\d+/m.test(patch))
736
+ return patch;
737
+ const files = touchedFilesFromPatch(patch);
738
+ if (!files.length)
739
+ return patch;
740
+ let updated = patch;
741
+ let changed = false;
742
+ for (const file of files) {
743
+ const resolved = path.join(workspaceRoot, file);
744
+ if (fs.existsSync(resolved))
745
+ continue;
746
+ const escaped = file.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
747
+ const minus = new RegExp(`^---\\s+(?:a/)?${escaped}$`, "m");
748
+ if (minus.test(updated)) {
749
+ updated = updated.replace(minus, "--- /dev/null");
750
+ changed = true;
751
+ }
752
+ const plus = new RegExp(`^\\+\\+\\+\\s+(?:b/)?${escaped}$`, "m");
753
+ if (plus.test(updated)) {
754
+ updated = updated.replace(plus, `+++ b/${file}`);
755
+ changed = true;
756
+ }
757
+ }
758
+ return changed ? ensureDiffHeader(updated) : updated;
759
+ };
161
760
  const stripInvalidIndexLines = (patch) => patch
162
761
  .split(/\r?\n/)
163
762
  .filter((line) => {
@@ -234,7 +833,23 @@ const fixMissingPrefixesInHunks = (patch) => {
234
833
  const lines = patch.split(/\r?\n/);
235
834
  const out = [];
236
835
  let inHunk = false;
836
+ let addFile = false;
237
837
  for (const line of lines) {
838
+ if (line.startsWith("diff --git ")) {
839
+ inHunk = false;
840
+ addFile = false;
841
+ out.push(line);
842
+ continue;
843
+ }
844
+ if (line.startsWith("--- ")) {
845
+ addFile = line.includes("/dev/null");
846
+ out.push(line);
847
+ continue;
848
+ }
849
+ if (line.startsWith("+++ ")) {
850
+ out.push(line);
851
+ continue;
852
+ }
238
853
  if (line.startsWith("@@")) {
239
854
  inHunk = true;
240
855
  out.push(line);
@@ -246,7 +861,11 @@ const fixMissingPrefixesInHunks = (patch) => {
246
861
  out.push(line);
247
862
  continue;
248
863
  }
249
- if (!/^[+\-\s]/.test(line) && line.trim().length) {
864
+ if (!line.length) {
865
+ out.push(addFile ? "+" : " ");
866
+ continue;
867
+ }
868
+ if (!/^[+\-\s]/.test(line)) {
250
869
  out.push(`+${line}`);
251
870
  continue;
252
871
  }
@@ -283,10 +902,87 @@ const parseAddedFileContents = (patch) => {
283
902
  }
284
903
  return Object.fromEntries(Object.entries(additions).map(([file, content]) => [file, content.join("\n")]));
285
904
  };
905
+ const parseAddOnlyPatchContents = (patch) => {
906
+ const lines = patch.split(/\r?\n/);
907
+ const additions = {};
908
+ let currentFile = null;
909
+ let inHunk = false;
910
+ let sawContextOrRemoval = false;
911
+ for (const line of lines) {
912
+ if (line.startsWith("diff --git ")) {
913
+ currentFile = null;
914
+ inHunk = false;
915
+ continue;
916
+ }
917
+ if (line.startsWith("+++ ")) {
918
+ const pathLine = line.replace(/^\+\+\+\s+/, "").trim();
919
+ if (pathLine && pathLine !== "/dev/null") {
920
+ currentFile = pathLine.replace(/^b\//, "");
921
+ }
922
+ continue;
923
+ }
924
+ if (line.startsWith("@@")) {
925
+ inHunk = true;
926
+ continue;
927
+ }
928
+ if (!inHunk)
929
+ continue;
930
+ if (line.startsWith("diff --git ") || line.startsWith("--- ") || line.startsWith("+++ ") || line.startsWith("*** End Patch")) {
931
+ inHunk = false;
932
+ continue;
933
+ }
934
+ if (line.startsWith("+") && !line.startsWith("+++")) {
935
+ if (currentFile) {
936
+ if (!additions[currentFile])
937
+ additions[currentFile] = [];
938
+ additions[currentFile].push(line.slice(1));
939
+ }
940
+ continue;
941
+ }
942
+ if (!line.length) {
943
+ if (currentFile) {
944
+ if (!additions[currentFile])
945
+ additions[currentFile] = [];
946
+ additions[currentFile].push("");
947
+ }
948
+ continue;
949
+ }
950
+ if (line.startsWith("-") || line.startsWith(" ")) {
951
+ sawContextOrRemoval = true;
952
+ }
953
+ }
954
+ if (sawContextOrRemoval)
955
+ return {};
956
+ return Object.fromEntries(Object.entries(additions).map(([file, content]) => [file, content.join("\n")]));
957
+ };
286
958
  const updateAddPatchForExistingFile = (patch, existingFiles, cwd) => {
287
959
  const additions = parseAddedFileContents(patch);
288
960
  const skipped = [];
289
961
  let updated = patch;
962
+ if (existingFiles.size > 0) {
963
+ const existingRelative = new Set(Array.from(existingFiles).map((absolute) => path.relative(cwd, absolute).replace(/\\/g, "/")));
964
+ let currentFile = null;
965
+ const out = [];
966
+ for (const line of updated.split(/\r?\n/)) {
967
+ if (line.startsWith("diff --git ")) {
968
+ const parts = line.split(" ");
969
+ const raw = parts[3] ?? parts[2] ?? "";
970
+ currentFile = raw.replace(/^b\//, "").replace(/^a\//, "");
971
+ out.push(line);
972
+ continue;
973
+ }
974
+ const currentExists = currentFile ? existingRelative.has(currentFile) : false;
975
+ if (currentExists && line.startsWith("new file mode")) {
976
+ continue;
977
+ }
978
+ if (currentExists && line.startsWith("--- /dev/null")) {
979
+ out.push(`--- a/${currentFile}`);
980
+ continue;
981
+ }
982
+ out.push(line);
983
+ }
984
+ updated = out.join("\n");
985
+ }
290
986
  for (const file of Object.keys(additions)) {
291
987
  const absolute = path.join(cwd, file);
292
988
  if (!existingFiles.has(absolute))
@@ -356,6 +1052,7 @@ export class WorkOnTasksService {
356
1052
  this.stateService = deps.stateService ?? new TaskStateService(deps.workspaceRepo);
357
1053
  this.vcs = deps.vcsClient ?? new VcsClient();
358
1054
  this.routingService = deps.routingService;
1055
+ this.ratingService = deps.ratingService;
359
1056
  }
360
1057
  async loadPrompts(agentId) {
361
1058
  const agentPrompts = "getPrompts" in this.deps.agentService ? await this.deps.agentService.getPrompts(agentId) : undefined;
@@ -476,6 +1173,26 @@ export class WorkOnTasksService {
476
1173
  });
477
1174
  return resolved.agent;
478
1175
  }
1176
+ ensureRatingService() {
1177
+ if (!this.ratingService) {
1178
+ this.ratingService = new AgentRatingService(this.workspace, {
1179
+ workspaceRepo: this.deps.workspaceRepo,
1180
+ globalRepo: this.deps.repo,
1181
+ agentService: this.deps.agentService,
1182
+ routingService: this.routingService,
1183
+ });
1184
+ }
1185
+ return this.ratingService;
1186
+ }
1187
+ resolveTaskComplexity(task) {
1188
+ const metadata = task.metadata ?? {};
1189
+ const metaComplexity = typeof metadata.complexity === "number" && Number.isFinite(metadata.complexity) ? metadata.complexity : undefined;
1190
+ const storyPoints = typeof task.storyPoints === "number" && Number.isFinite(task.storyPoints) ? task.storyPoints : undefined;
1191
+ const candidate = metaComplexity ?? storyPoints;
1192
+ if (!Number.isFinite(candidate ?? NaN))
1193
+ return undefined;
1194
+ return Math.min(10, Math.max(1, Math.round(candidate)));
1195
+ }
479
1196
  nextLogSeq(taskRunId) {
480
1197
  const next = (this.taskLogSeq.get(taskRunId) ?? 0) + 1;
481
1198
  this.taskLogSeq.set(taskRunId, next);
@@ -541,22 +1258,86 @@ export class WorkOnTasksService {
541
1258
  const summary = parts.join("\n");
542
1259
  return { summary, warnings };
543
1260
  }
544
- buildPrompt(task, docSummary, fileScope) {
1261
+ parseCommentBody(body) {
1262
+ const trimmed = (body ?? "").trim();
1263
+ if (!trimmed)
1264
+ return { message: "(no details provided)" };
1265
+ const lines = trimmed.split(/\r?\n/);
1266
+ const normalize = (value) => value.trim().toLowerCase();
1267
+ const messageIndex = lines.findIndex((line) => normalize(line) === "message:");
1268
+ const suggestedIndex = lines.findIndex((line) => {
1269
+ const normalized = normalize(line);
1270
+ return normalized === "suggested_fix:" || normalized === "suggested fix:";
1271
+ });
1272
+ if (messageIndex >= 0) {
1273
+ const messageLines = lines.slice(messageIndex + 1, suggestedIndex >= 0 ? suggestedIndex : undefined);
1274
+ const message = messageLines.join("\n").trim();
1275
+ const suggestedLines = suggestedIndex >= 0 ? lines.slice(suggestedIndex + 1) : [];
1276
+ const suggestedFix = suggestedLines.join("\n").trim();
1277
+ return { message: message || trimmed, suggestedFix: suggestedFix || undefined };
1278
+ }
1279
+ if (suggestedIndex >= 0) {
1280
+ const message = lines.slice(0, suggestedIndex).join("\n").trim() || trimmed;
1281
+ const inlineFix = lines[suggestedIndex]?.split(/suggested fix:/i)[1]?.trim();
1282
+ const suggestedTail = lines.slice(suggestedIndex + 1).join("\n").trim();
1283
+ const suggestedFix = inlineFix || suggestedTail || undefined;
1284
+ return { message, suggestedFix };
1285
+ }
1286
+ return { message: trimmed };
1287
+ }
1288
+ buildCommentBacklog(comments) {
1289
+ if (!comments.length)
1290
+ return "";
1291
+ const seen = new Set();
1292
+ const lines = [];
1293
+ const toSingleLine = (value) => value.replace(/\s+/g, " ").trim();
1294
+ for (const comment of comments) {
1295
+ const slug = comment.slug?.trim() || undefined;
1296
+ const details = this.parseCommentBody(comment.body);
1297
+ const key = slug ??
1298
+ `${comment.sourceCommand}:${comment.file ?? ""}:${comment.line ?? ""}:${details.message || comment.body}`;
1299
+ if (seen.has(key))
1300
+ continue;
1301
+ seen.add(key);
1302
+ const location = comment.file
1303
+ ? `${comment.file}${typeof comment.line === "number" ? `:${comment.line}` : ""}`
1304
+ : "(location not specified)";
1305
+ const message = toSingleLine(details.message || comment.body || "(no details provided)");
1306
+ lines.push(`- [${slug ?? "untracked"}] ${location} ${message}`);
1307
+ const suggestedFix = comment.metadata?.suggestedFix ?? details.suggestedFix ?? undefined;
1308
+ if (suggestedFix) {
1309
+ lines.push(` Suggested fix: ${toSingleLine(suggestedFix)}`);
1310
+ }
1311
+ }
1312
+ return lines.join("\n");
1313
+ }
1314
+ async loadUnresolvedComments(taskId) {
1315
+ return this.deps.workspaceRepo.listTaskComments(taskId, {
1316
+ sourceCommands: ["code-review", "qa-tasks"],
1317
+ resolved: false,
1318
+ limit: 20,
1319
+ });
1320
+ }
1321
+ buildPrompt(task, docSummary, fileScope, commentBacklog) {
545
1322
  const deps = task.dependencies.keys.length ? `Depends on: ${task.dependencies.keys.join(", ")}` : "No open dependencies.";
546
1323
  const acceptance = (task.task.acceptanceCriteria ?? []).join("; ");
547
1324
  const docdexHint = docSummary ||
548
1325
  "Use docdex: search workspace docs with project key and fetch linked documents when present (doc_links metadata).";
1326
+ const backlog = commentBacklog ? `Comment backlog:\n${commentBacklog}` : "";
549
1327
  return [
550
1328
  `Task ${task.task.key}: ${task.task.title}`,
551
1329
  `Description: ${task.task.description ?? "(none)"}`,
552
1330
  `Epic: ${task.task.epicKey} (${task.task.epicTitle ?? "n/a"}), Story: ${task.task.storyKey} (${task.task.storyTitle ?? "n/a"})`,
553
1331
  `Acceptance: ${acceptance || "Refer to SDS/OpenAPI for expected behavior."}`,
554
1332
  deps,
1333
+ backlog,
555
1334
  `Allowed files: ${fileScope.length ? fileScope.join(", ") : "(not constrained)"}`,
556
1335
  `Doc context:\n${docdexHint}`,
557
- "Verify target paths against the current workspace (use docdex/file hints); do not assume hashed or generated asset names exist. If a path is missing, emit a new-file diff with full content (and parent dirs) instead of editing a non-existent file so git apply succeeds. Use JSON.parse-friendly unified diffs.",
558
- "Produce a concise plan and a patch in unified diff fenced with ```patch```.",
559
- ].join("\n");
1336
+ "Verify target paths against the current workspace (use docdex/file hints); do not assume hashed or generated asset names exist. If a path is missing, emit a new-file diff with full content (and parent dirs) instead of editing a non-existent file so git apply succeeds. Use valid unified diffs without JSON wrappers.",
1337
+ "Provide a concise plan as plain text bullet points, then output code changes. If editing existing files, emit a unified diff inside ```patch``` fences. If creating new files, emit FILE blocks. Do not output JSON (unless forced by the runtime, in which case include a top-level `patch` string or `files` array).",
1338
+ ]
1339
+ .filter(Boolean)
1340
+ .join("\n");
560
1341
  }
561
1342
  async checkoutBaseBranch(baseBranch) {
562
1343
  await this.vcs.ensureRepo(this.workspace.workspaceRoot);
@@ -645,6 +1426,7 @@ export class WorkOnTasksService {
645
1426
  await this.logTask(taskRunId, `Merge conflicts detected while merging ${baseBranch} into ${branch}.`, "vcs", {
646
1427
  conflicts,
647
1428
  });
1429
+ await this.vcs.abortMerge(this.workspace.workspaceRoot);
648
1430
  return { branch, base: baseBranch, mergeConflicts: conflicts, remoteSyncNote };
649
1431
  }
650
1432
  throw new Error(`Failed to merge ${baseBranch} into ${branch}: ${error.message}`);
@@ -743,7 +1525,9 @@ export class WorkOnTasksService {
743
1525
  for (const patch of patches) {
744
1526
  const normalized = maybeConvertApplyPatch(patch);
745
1527
  const withHeader = ensureDiffHeader(normalized);
746
- const withHunks = normalizeHunkHeaders(withHeader);
1528
+ const withPaths = normalizeDiffPaths(withHeader, cwd);
1529
+ const withAdds = convertMissingFilePatchToAdd(withPaths, cwd);
1530
+ const withHunks = normalizeHunkHeaders(withAdds);
747
1531
  const withPrefixes = fixMissingPrefixesInHunks(withHunks);
748
1532
  const sanitized = stripInvalidIndexLines(withPrefixes);
749
1533
  if (isPlaceholderPatch(sanitized)) {
@@ -791,13 +1575,14 @@ export class WorkOnTasksService {
791
1575
  catch (error) {
792
1576
  // Fallback: if the segment only adds new files and git apply fails, write the files directly.
793
1577
  const additions = parseAddedFileContents(patchToApply);
794
- const addTargets = Object.keys(additions);
1578
+ const fallbackAdditions = Object.keys(additions).length ? additions : parseAddOnlyPatchContents(patchToApply);
1579
+ const addTargets = Object.keys(fallbackAdditions);
795
1580
  if (addTargets.length && segmentFiles.length === addTargets.length) {
796
1581
  try {
797
1582
  for (const file of addTargets) {
798
1583
  const dest = path.join(cwd, file);
799
1584
  await fs.promises.mkdir(path.dirname(dest), { recursive: true });
800
- await fs.promises.writeFile(dest, additions[file], "utf8");
1585
+ await fs.promises.writeFile(dest, fallbackAdditions[file], "utf8");
801
1586
  touched.add(file);
802
1587
  }
803
1588
  applied += 1;
@@ -814,11 +1599,11 @@ export class WorkOnTasksService {
814
1599
  }
815
1600
  }
816
1601
  if (!applied && warnings.length) {
817
- return { touched: Array.from(touched), warnings, error: "No patches applied; all were skipped as placeholders." };
1602
+ return { touched: Array.from(touched), warnings, error: "No patches applied; all segments failed or were skipped." };
818
1603
  }
819
1604
  return { touched: Array.from(touched), warnings };
820
1605
  }
821
- async applyFileBlocks(files, cwd, dryRun, allowNoop = false) {
1606
+ async applyFileBlocks(files, cwd, dryRun, allowNoop = false, allowOverwrite = false) {
822
1607
  const touched = new Set();
823
1608
  const warnings = [];
824
1609
  let applied = 0;
@@ -835,7 +1620,28 @@ export class WorkOnTasksService {
835
1620
  continue;
836
1621
  }
837
1622
  if (fs.existsSync(resolved)) {
838
- warnings.push(`Skipped file block for existing file: ${relativePath}`);
1623
+ if (!allowOverwrite) {
1624
+ warnings.push(`Skipped file block for existing file: ${relativePath}`);
1625
+ continue;
1626
+ }
1627
+ if (!file.content || !file.content.trim()) {
1628
+ warnings.push(`Skipped overwrite for ${relativePath}: empty FILE block content.`);
1629
+ continue;
1630
+ }
1631
+ warnings.push(`Overwriting existing file from FILE block: ${relativePath}`);
1632
+ if (dryRun) {
1633
+ touched.add(relativePath);
1634
+ applied += 1;
1635
+ continue;
1636
+ }
1637
+ try {
1638
+ await fs.promises.writeFile(resolved, file.content, "utf8");
1639
+ touched.add(relativePath);
1640
+ applied += 1;
1641
+ }
1642
+ catch (error) {
1643
+ warnings.push(`Failed to overwrite file block ${relativePath}: ${error.message}`);
1644
+ }
839
1645
  continue;
840
1646
  }
841
1647
  if (dryRun) {
@@ -863,11 +1669,14 @@ export class WorkOnTasksService {
863
1669
  }
864
1670
  return { touched: Array.from(touched), warnings, appliedCount: applied };
865
1671
  }
866
- async runTests(commands, cwd) {
1672
+ async runTests(commands, cwd, abortSignal) {
867
1673
  const results = [];
868
1674
  for (const command of commands) {
869
1675
  try {
870
- const { stdout, stderr } = await exec(command, { cwd });
1676
+ if (abortSignal?.aborted) {
1677
+ throw new Error("work_on_tasks_aborted");
1678
+ }
1679
+ const { stdout, stderr } = await exec(command, { cwd, signal: abortSignal });
871
1680
  results.push({ command, stdout, stderr, code: 0 });
872
1681
  }
873
1682
  catch (error) {
@@ -885,10 +1694,16 @@ export class WorkOnTasksService {
885
1694
  async workOnTasks(request) {
886
1695
  await this.ensureMcoda();
887
1696
  const agentStream = request.agentStream !== false;
888
- const configuredBaseBranch = request.baseBranch ?? this.workspace.config?.branch;
889
- const baseBranch = DEFAULT_BASE_BRANCH;
890
- const baseBranchWarnings = configuredBaseBranch && configuredBaseBranch !== baseBranch
891
- ? [`Base branch forced to ${baseBranch}; ignoring configured ${configuredBaseBranch}.`]
1697
+ const configuredBaseBranch = this.workspace.config?.branch;
1698
+ const requestedBaseBranch = request.baseBranch;
1699
+ const resolvedBaseBranch = (requestedBaseBranch ?? configuredBaseBranch ?? DEFAULT_BASE_BRANCH).trim();
1700
+ const baseBranch = resolvedBaseBranch.length ? resolvedBaseBranch : DEFAULT_BASE_BRANCH;
1701
+ const configuredAutoMerge = this.workspace.config?.autoMerge;
1702
+ const configuredAutoPush = this.workspace.config?.autoPush;
1703
+ const autoMerge = request.autoMerge ?? configuredAutoMerge ?? true;
1704
+ const autoPush = request.autoPush ?? configuredAutoPush ?? true;
1705
+ const baseBranchWarnings = requestedBaseBranch && configuredBaseBranch && requestedBaseBranch !== configuredBaseBranch
1706
+ ? [`Base branch override ${requestedBaseBranch} differs from workspace config ${configuredBaseBranch}.`]
892
1707
  : [];
893
1708
  const commandRun = await this.deps.jobService.startCommandRun("work-on-tasks", request.projectKey, {
894
1709
  taskIds: request.taskKeys,
@@ -911,6 +1726,43 @@ export class WorkOnTasksService {
911
1726
  });
912
1727
  let selection;
913
1728
  let storyPointsProcessed = 0;
1729
+ const abortSignal = request.abortSignal;
1730
+ const resolveAbortReason = () => {
1731
+ const reason = abortSignal?.reason;
1732
+ if (typeof reason === "string" && reason.trim().length > 0)
1733
+ return reason;
1734
+ if (reason instanceof Error && reason.message)
1735
+ return reason.message;
1736
+ return "work_on_tasks_aborted";
1737
+ };
1738
+ const abortIfSignaled = () => {
1739
+ if (abortSignal?.aborted) {
1740
+ throw new Error(resolveAbortReason());
1741
+ }
1742
+ };
1743
+ const withAbort = async (promise) => {
1744
+ if (!abortSignal)
1745
+ return promise;
1746
+ if (abortSignal.aborted) {
1747
+ throw new Error(resolveAbortReason());
1748
+ }
1749
+ return new Promise((resolve, reject) => {
1750
+ const onAbort = () => reject(new Error(resolveAbortReason()));
1751
+ abortSignal.addEventListener("abort", onAbort, { once: true });
1752
+ promise.then(resolve, reject).finally(() => {
1753
+ abortSignal.removeEventListener("abort", onAbort);
1754
+ });
1755
+ });
1756
+ };
1757
+ const isAbortError = (message) => {
1758
+ if (!message)
1759
+ return false;
1760
+ if (message === "agent_timeout")
1761
+ return true;
1762
+ if (/abort/i.test(message))
1763
+ return true;
1764
+ return message === resolveAbortReason();
1765
+ };
914
1766
  try {
915
1767
  await this.checkoutBaseBranch(baseBranch);
916
1768
  selection = await this.selectionService.selectTasks({
@@ -1048,7 +1900,8 @@ export class WorkOnTasksService {
1048
1900
  emitLine(" ░░░░░ END OF THE TASK WORK ░░░░░");
1049
1901
  emitBlank();
1050
1902
  };
1051
- for (const [index, task] of selection.ordered.entries()) {
1903
+ taskLoop: for (const [index, task] of selection.ordered.entries()) {
1904
+ abortIfSignaled();
1052
1905
  const startedAt = new Date().toISOString();
1053
1906
  const taskRun = await this.deps.workspaceRepo.createTaskRun({
1054
1907
  taskId: task.task.id,
@@ -1064,6 +1917,7 @@ export class WorkOnTasksService {
1064
1917
  gitCommitSha: task.task.vcsLastCommitSha ?? null,
1065
1918
  });
1066
1919
  const sessionId = formatSessionId(startedAt);
1920
+ const initialStatus = (task.task.status ?? "").toLowerCase().trim();
1067
1921
  const taskAlias = `Working on task ${task.task.key}`;
1068
1922
  const taskSummary = task.task.title || task.task.description || "(none)";
1069
1923
  const modelLabel = agent.defaultModel ?? "(default)";
@@ -1078,7 +1932,9 @@ export class WorkOnTasksService {
1078
1932
  let promptEstimateTotal = 0;
1079
1933
  let mergeStatus = "skipped";
1080
1934
  let patchApplied = false;
1935
+ let runAllScriptCreated = false;
1081
1936
  let touched = [];
1937
+ let unresolvedComments = [];
1082
1938
  let taskBranchName = task.task.vcsBranch ?? null;
1083
1939
  let baseBranchName = task.task.vcsBaseBranch ?? baseBranch;
1084
1940
  let branchInfo = {
@@ -1151,6 +2007,7 @@ export class WorkOnTasksService {
1151
2007
  });
1152
2008
  };
1153
2009
  try {
2010
+ abortIfSignaled();
1154
2011
  await startPhase("selection", {
1155
2012
  dependencies: task.dependencies.keys,
1156
2013
  blockedReason: task.blockedReason,
@@ -1172,7 +2029,7 @@ export class WorkOnTasksService {
1172
2029
  taskStatus = "blocked";
1173
2030
  await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
1174
2031
  await emitTaskEndOnce();
1175
- continue;
2032
+ continue taskLoop;
1176
2033
  }
1177
2034
  await endPhase("selection");
1178
2035
  }
@@ -1192,12 +2049,12 @@ export class WorkOnTasksService {
1192
2049
  taskStatus = "failed";
1193
2050
  await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
1194
2051
  await emitTaskEndOnce();
1195
- continue;
2052
+ continue taskLoop;
1196
2053
  }
2054
+ const lockTtlSeconds = resolveLockTtlSeconds(request.maxAgentSeconds);
1197
2055
  let lockAcquired = false;
1198
2056
  if (!request.dryRun) {
1199
- const ttlSeconds = Math.max(1, TASK_LOCK_TTL_SECONDS);
1200
- const lockResult = await this.deps.workspaceRepo.tryAcquireTaskLock(task.task.id, taskRun.id, job.id, ttlSeconds);
2057
+ const lockResult = await this.deps.workspaceRepo.tryAcquireTaskLock(task.task.id, taskRun.id, job.id, lockTtlSeconds);
1201
2058
  if (!lockResult.acquired) {
1202
2059
  await this.logTask(taskRun.id, "Task already locked by another run; skipping.", "vcs", {
1203
2060
  lock: lockResult.lock ?? null,
@@ -1210,21 +2067,58 @@ export class WorkOnTasksService {
1210
2067
  taskStatus = "skipped";
1211
2068
  await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
1212
2069
  await emitTaskEndOnce();
1213
- continue;
2070
+ continue taskLoop;
1214
2071
  }
1215
2072
  lockAcquired = true;
1216
2073
  }
1217
2074
  try {
2075
+ abortIfSignaled();
1218
2076
  const metadata = task.task.metadata ?? {};
1219
2077
  let allowedFiles = Array.isArray(metadata.files) ? normalizePaths(this.workspace.workspaceRoot, metadata.files) : [];
1220
- const testCommands = Array.isArray(metadata.tests) ? metadata.tests : [];
2078
+ const testRequirements = normalizeTestRequirements(metadata.test_requirements ?? metadata.testRequirements);
2079
+ const testRequirementsNote = formatTestRequirementsNote(testRequirements);
2080
+ let testCommands = normalizeTestCommands(metadata.tests);
2081
+ const sanitized = sanitizeTestCommands(testCommands, this.workspace.workspaceRoot);
2082
+ testCommands = sanitized.commands;
2083
+ if (sanitized.skipped.length) {
2084
+ await this.logTask(taskRun.id, `Skipped test commands without workspace package.json: ${sanitized.skipped.join("; ")}`, "tests");
2085
+ }
2086
+ if (!testCommands.length && hasTestRequirements(testRequirements)) {
2087
+ const fallbackCommand = detectScopedTestCommand(this.workspace.workspaceRoot, allowedFiles);
2088
+ if (fallbackCommand)
2089
+ testCommands = [fallbackCommand];
2090
+ }
2091
+ let runAllTestsCommandHint = detectRunAllTestsCommand(this.workspace.workspaceRoot);
2092
+ if (!runAllTestsCommandHint && !request.dryRun && hasTestRequirements(testRequirements)) {
2093
+ try {
2094
+ runAllScriptCreated = await ensureRunAllTestsScript(this.workspace.workspaceRoot, testRequirements, testCommands);
2095
+ if (runAllScriptCreated) {
2096
+ runAllTestsCommandHint = detectRunAllTestsCommand(this.workspace.workspaceRoot);
2097
+ await this.logTask(taskRun.id, "Created run-all tests script (tests/all.js).", "tests");
2098
+ }
2099
+ }
2100
+ catch (error) {
2101
+ await this.logTask(taskRun.id, `Failed to create run-all tests script: ${error instanceof Error ? error.message : String(error)}`, "tests");
2102
+ }
2103
+ }
2104
+ if (runAllScriptCreated && allowedFiles.length && !allowedFiles.includes("tests/all.js")) {
2105
+ allowedFiles = [...allowedFiles, "tests/all.js"];
2106
+ }
2107
+ if (!testCommands.length && hasTestRequirements(testRequirements) && runAllTestsCommandHint) {
2108
+ testCommands = [runAllTestsCommandHint];
2109
+ }
2110
+ const runAllTestsNote = request.dryRun
2111
+ ? ""
2112
+ : runAllTestsCommandHint
2113
+ ? `Run-all tests command: ${runAllTestsCommandHint}`
2114
+ : "Run-all tests script missing (tests/all.js). Create it and register new tests.";
2115
+ const shouldRunTests = !request.dryRun;
1221
2116
  let mergeConflicts = [];
1222
2117
  let remoteSyncNote = "";
1223
- const softFailures = [];
2118
+ let testAttemptCount = 0;
1224
2119
  let lastLockRefresh = Date.now();
1225
2120
  const getLockRefreshIntervalMs = () => {
1226
- const ttlSeconds = Math.max(1, TASK_LOCK_TTL_SECONDS);
1227
- const ttlMs = ttlSeconds * 1000;
2121
+ const ttlMs = lockTtlSeconds * 1000;
1228
2122
  return Math.max(250, Math.min(ttlMs - 250, Math.floor(ttlMs / 3)));
1229
2123
  };
1230
2124
  const refreshLock = async (label, force = false) => {
@@ -1234,8 +2128,7 @@ export class WorkOnTasksService {
1234
2128
  if (!force && now - lastLockRefresh < getLockRefreshIntervalMs())
1235
2129
  return true;
1236
2130
  try {
1237
- const ttlSeconds = Math.max(1, TASK_LOCK_TTL_SECONDS);
1238
- const refreshed = await this.deps.workspaceRepo.refreshTaskLock(task.task.id, taskRun.id, ttlSeconds);
2131
+ const refreshed = await this.deps.workspaceRepo.refreshTaskLock(task.task.id, taskRun.id, lockTtlSeconds);
1239
2132
  if (!refreshed) {
1240
2133
  await this.logTask(taskRun.id, `Task lock lost during ${label}; another run may have taken it.`, "vcs", {
1241
2134
  reason: "lock_stolen",
@@ -1255,6 +2148,21 @@ export class WorkOnTasksService {
1255
2148
  }
1256
2149
  return true;
1257
2150
  };
2151
+ if (!request.dryRun && hasTestRequirements(testRequirements) && testCommands.length === 0) {
2152
+ const message = "Task has test requirements but no test command is configured.";
2153
+ await this.logTask(taskRun.id, message, "tests", { testRequirements });
2154
+ await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "tests", "error", { error: "tests_not_configured" });
2155
+ await this.stateService.markBlocked(task.task, "tests_not_configured");
2156
+ await this.deps.workspaceRepo.updateTaskRun(taskRun.id, {
2157
+ status: "failed",
2158
+ finishedAt: new Date().toISOString(),
2159
+ });
2160
+ results.push({ taskKey: task.task.key, status: "failed", notes: "tests_not_configured" });
2161
+ taskStatus = "failed";
2162
+ await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
2163
+ await emitTaskEndOnce();
2164
+ continue taskLoop;
2165
+ }
1258
2166
  if (!request.dryRun) {
1259
2167
  try {
1260
2168
  branchInfo = await this.ensureBranches(task.task.key, baseBranch, taskRun.id);
@@ -1262,14 +2170,24 @@ export class WorkOnTasksService {
1262
2170
  baseBranchName = branchInfo.base || baseBranchName;
1263
2171
  mergeConflicts = branchInfo.mergeConflicts ?? [];
1264
2172
  remoteSyncNote = branchInfo.remoteSyncNote ?? "";
1265
- if (mergeConflicts.length && allowedFiles.length) {
1266
- allowedFiles = Array.from(new Set([...allowedFiles, ...mergeConflicts.map((f) => f.replace(/\\/g, "/"))]));
1267
- }
1268
2173
  await this.deps.workspaceRepo.updateTask(task.task.id, {
1269
2174
  vcsBranch: branchInfo.branch,
1270
2175
  vcsBaseBranch: branchInfo.base,
1271
2176
  });
1272
2177
  await this.logTask(taskRun.id, `Using branch ${branchInfo.branch} (base ${branchInfo.base})`, "vcs");
2178
+ if (mergeConflicts.length) {
2179
+ await this.logTask(taskRun.id, `Blocking task due to merge conflicts: ${mergeConflicts.join(", ")}`, "vcs");
2180
+ await this.stateService.markBlocked(task.task, "merge_conflict");
2181
+ await this.deps.workspaceRepo.updateTaskRun(taskRun.id, {
2182
+ status: "failed",
2183
+ finishedAt: new Date().toISOString(),
2184
+ });
2185
+ results.push({ taskKey: task.task.key, status: "failed", notes: "merge_conflict" });
2186
+ taskStatus = "failed";
2187
+ await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
2188
+ await emitTaskEndOnce();
2189
+ continue taskLoop;
2190
+ }
1273
2191
  }
1274
2192
  catch (error) {
1275
2193
  const message = `Failed to prepare branches: ${error.message}`;
@@ -1278,10 +2196,10 @@ export class WorkOnTasksService {
1278
2196
  results.push({ taskKey: task.task.key, status: "failed", notes: message });
1279
2197
  taskStatus = "failed";
1280
2198
  await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
1281
- continue;
2199
+ continue taskLoop;
1282
2200
  }
1283
2201
  }
1284
- await startPhase("context", { allowedFiles, tests: testCommands });
2202
+ await startPhase("context", { allowedFiles, tests: testCommands, testRequirements });
1285
2203
  const docLinks = Array.isArray(metadata.doc_links) ? metadata.doc_links : [];
1286
2204
  const { summary: docSummary, warnings: docWarnings } = await this.gatherDocContext(request.projectKey, docLinks);
1287
2205
  if (docWarnings.length) {
@@ -1289,13 +2207,36 @@ export class WorkOnTasksService {
1289
2207
  await this.logTask(taskRun.id, docWarnings.join("; "), "docdex");
1290
2208
  }
1291
2209
  await endPhase("context", { docWarnings, docSummary: Boolean(docSummary) });
2210
+ const projectGuidance = await loadProjectGuidance(this.workspace.workspaceRoot);
2211
+ if (projectGuidance) {
2212
+ await this.logTask(taskRun.id, `Loaded project guidance from ${projectGuidance.source}`, "project_guidance");
2213
+ }
1292
2214
  await startPhase("prompt", { docSummary: Boolean(docSummary), agent: agent.id });
1293
- const conflictNote = mergeConflicts.length
1294
- ? `Merge conflicts detected in: ${mergeConflicts.join(", ")}. Resolve these conflicts before any other task work. Remove conflict markers and ensure the files are consistent.`
2215
+ unresolvedComments = await this.loadUnresolvedComments(task.task.id);
2216
+ const commentBacklog = this.buildCommentBacklog(unresolvedComments);
2217
+ const promptBase = this.buildPrompt(task, docSummary, allowedFiles, commentBacklog);
2218
+ const testCommandNote = testCommands.length ? `Test commands: ${testCommands.join(" && ")}` : "";
2219
+ const testExpectationNote = shouldRunTests
2220
+ ? "Tests must pass before the task can be finalized. Run task-specific tests first, then run-all tests."
1295
2221
  : "";
1296
- const promptBase = this.buildPrompt(task, docSummary, allowedFiles);
1297
- const notes = [remoteSyncNote, conflictNote].filter(Boolean).join("\n");
1298
- const prompt = notes ? `${notes}\n\n${promptBase}` : promptBase;
2222
+ const outputRequirementNote = [
2223
+ "Output requirements (strict):",
2224
+ "- Return only code changes.",
2225
+ "- For edits to existing files, output a unified diff inside ```patch fences.",
2226
+ "- For new files, output FILE blocks in this format:",
2227
+ " FILE: path/to/file.ext",
2228
+ " ```",
2229
+ " <full file contents>",
2230
+ " ```",
2231
+ "- Do not include plans, narration, or JSON unless the runtime forces it; if forced, return JSON with a top-level `patch` string or `files` array of {path, content}.",
2232
+ ].join("\n");
2233
+ const promptExtras = [testRequirementsNote, testCommandNote, runAllTestsNote, testExpectationNote, outputRequirementNote]
2234
+ .filter(Boolean)
2235
+ .join("\n");
2236
+ const promptWithTests = promptExtras ? `${promptBase}\n${promptExtras}` : promptBase;
2237
+ const guidanceBlock = projectGuidance?.content ? `Project Guidance (read first):\n${projectGuidance.content}` : "";
2238
+ const notes = remoteSyncNote;
2239
+ const prompt = [guidanceBlock, notes, promptWithTests].filter(Boolean).join("\n\n");
1299
2240
  const commandPrompt = prompts.commandPrompt ?? "";
1300
2241
  const systemPrompt = [prompts.jobPrompt, prompts.characterPrompt, commandPrompt].filter(Boolean).join("\n\n");
1301
2242
  await this.logTask(taskRun.id, `System prompt:\n${systemPrompt || "(none)"}`, "prompt");
@@ -1311,7 +2252,7 @@ export class WorkOnTasksService {
1311
2252
  results.push({ taskKey: task.task.key, status: "skipped", notes: "dry_run" });
1312
2253
  taskStatus = "skipped";
1313
2254
  await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
1314
- continue;
2255
+ continue taskLoop;
1315
2256
  }
1316
2257
  try {
1317
2258
  await this.stateService.transitionToInProgress(task.task);
@@ -1331,14 +2272,20 @@ export class WorkOnTasksService {
1331
2272
  }
1332
2273
  };
1333
2274
  const invokeAgentOnce = async (input, phaseLabel) => {
2275
+ abortIfSignaled();
1334
2276
  let output = "";
1335
2277
  const started = Date.now();
1336
2278
  if (agentStream && this.deps.agentService.invokeStream) {
1337
- const stream = await this.deps.agentService.invokeStream(agent.id, {
2279
+ const stream = await withAbort(this.deps.agentService.invokeStream(agent.id, {
1338
2280
  input,
1339
2281
  metadata: { taskKey: task.task.key },
1340
- });
2282
+ }));
1341
2283
  let pollLockLost = false;
2284
+ let aborted = false;
2285
+ const onAbort = () => {
2286
+ aborted = true;
2287
+ };
2288
+ abortSignal?.addEventListener("abort", onAbort, { once: true });
1342
2289
  const refreshTimer = setInterval(() => {
1343
2290
  void refreshLock("agent_stream_poll").then((ok) => {
1344
2291
  if (!ok)
@@ -1347,6 +2294,9 @@ export class WorkOnTasksService {
1347
2294
  }, getLockRefreshIntervalMs());
1348
2295
  try {
1349
2296
  for await (const chunk of stream) {
2297
+ if (aborted) {
2298
+ throw new Error(resolveAbortReason());
2299
+ }
1350
2300
  output += chunk.output ?? "";
1351
2301
  streamChunk(chunk.output);
1352
2302
  await this.logTask(taskRun.id, chunk.output ?? "", phaseLabel);
@@ -1358,11 +2308,15 @@ export class WorkOnTasksService {
1358
2308
  }
1359
2309
  finally {
1360
2310
  clearInterval(refreshTimer);
2311
+ abortSignal?.removeEventListener("abort", onAbort);
1361
2312
  }
1362
2313
  if (pollLockLost) {
1363
2314
  await this.logTask(taskRun.id, "Aborting task: lock lost during agent stream.", "vcs");
1364
2315
  throw new Error("Task lock lost during agent stream.");
1365
2316
  }
2317
+ if (aborted) {
2318
+ throw new Error(resolveAbortReason());
2319
+ }
1366
2320
  }
1367
2321
  else {
1368
2322
  let pollLockLost = false;
@@ -1379,9 +2333,7 @@ export class WorkOnTasksService {
1379
2333
  rejectLockLost(new Error("Task lock lost during agent invoke."));
1380
2334
  });
1381
2335
  }, getLockRefreshIntervalMs());
1382
- const invokePromise = this.deps.agentService
1383
- .invoke(agent.id, { input, metadata: { taskKey: task.task.key } })
1384
- .catch((error) => {
2336
+ const invokePromise = withAbort(this.deps.agentService.invoke(agent.id, { input, metadata: { taskKey: task.task.key } })).catch((error) => {
1385
2337
  if (pollLockLost)
1386
2338
  return null;
1387
2339
  throw error;
@@ -1404,37 +2356,6 @@ export class WorkOnTasksService {
1404
2356
  }
1405
2357
  return { output, durationSeconds: (Date.now() - started) / 1000 };
1406
2358
  };
1407
- let agentOutput = "";
1408
- let agentDuration = 0;
1409
- let triedRetry = false;
1410
- const agentInput = `${systemPrompt}\n\n${prompt}`;
1411
- try {
1412
- await startPhase("agent", { agent: agent.id, stream: agentStream });
1413
- const first = await invokeAgentOnce(agentInput, "agent");
1414
- agentOutput = first.output;
1415
- agentDuration = first.durationSeconds;
1416
- await endPhase("agent", { agentDurationSeconds: agentDuration });
1417
- if (!(await refreshLock("agent"))) {
1418
- await this.logTask(taskRun.id, "Aborting task: lock lost after agent completion.", "vcs");
1419
- throw new Error("Task lock lost after agent completion.");
1420
- }
1421
- }
1422
- catch (error) {
1423
- const message = error instanceof Error ? error.message : String(error);
1424
- if (/task lock lost/i.test(message)) {
1425
- throw error;
1426
- }
1427
- await this.logTask(taskRun.id, `Agent invocation failed: ${message}`, "agent");
1428
- await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "agent", "error", { error: message });
1429
- await this.deps.workspaceRepo.updateTaskRun(taskRun.id, {
1430
- status: "failed",
1431
- finishedAt: new Date().toISOString(),
1432
- });
1433
- results.push({ taskKey: task.task.key, status: "failed", notes: message });
1434
- taskStatus = "failed";
1435
- await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
1436
- continue;
1437
- }
1438
2359
  const recordUsage = async (phase, output, durationSeconds, promptText) => {
1439
2360
  const promptTokens = estimateTokens(promptText);
1440
2361
  const completionTokens = estimateTokens(output);
@@ -1455,61 +2376,201 @@ export class WorkOnTasksService {
1455
2376
  durationSeconds,
1456
2377
  });
1457
2378
  };
1458
- await recordUsage("agent", agentOutput, agentDuration, agentInput);
1459
- let patches = extractPatches(agentOutput);
1460
- let fileBlocks = extractFileBlocks(agentOutput);
1461
- if (patches.length === 0 && fileBlocks.length === 0 && !triedRetry) {
1462
- triedRetry = true;
1463
- await this.logTask(taskRun.id, "Agent output did not include a patch or file blocks; retrying with explicit output instructions.", "agent");
2379
+ const maxAttempts = shouldRunTests ? MAX_TEST_FIX_ATTEMPTS : 1;
2380
+ let testsPassed = !shouldRunTests;
2381
+ let lastTestFailureSummary = "";
2382
+ let lastTestResults = [];
2383
+ let lastTestErrorType = null;
2384
+ for (let attempt = 1; attempt <= maxAttempts; attempt++) {
2385
+ abortIfSignaled();
2386
+ const attemptNotes = [];
2387
+ if (attempt > 1) {
2388
+ attemptNotes.push(`Retry attempt ${attempt} of ${maxAttempts}.`);
2389
+ }
2390
+ if (lastTestFailureSummary) {
2391
+ attemptNotes.push("Previous test run failed. Fix the issues and update the code/tests.");
2392
+ attemptNotes.push(`Test failure summary:\n${lastTestFailureSummary}`);
2393
+ }
2394
+ const attemptPrompt = attemptNotes.length ? `${prompt}\n\n${attemptNotes.join("\n")}` : prompt;
2395
+ const agentInput = `${systemPrompt}\n\n${attemptPrompt}`;
2396
+ let agentOutput = "";
2397
+ let agentDuration = 0;
2398
+ let triedRetry = false;
2399
+ let triedPatchFallback = false;
1464
2400
  try {
1465
- const retryInput = `${systemPrompt}\n\n${prompt}\n\nOutput only code changes. If editing existing files, output a unified diff inside \`\`\`patch\`\`\` fences. If creating new files, output FILE blocks in this format:\nFILE: path/to/file.ext\n\`\`\`\n<full file contents>\n\`\`\`\nDo not include analysis or narration.`;
1466
- const retry = await invokeAgentOnce(retryInput, "agent");
1467
- agentOutput = retry.output;
1468
- agentDuration += retry.durationSeconds;
1469
- await recordUsage("agent_retry", retry.output, retry.durationSeconds, retryInput);
1470
- patches = extractPatches(agentOutput);
1471
- fileBlocks = extractFileBlocks(agentOutput);
2401
+ await startPhase("agent", { agent: agent.id, stream: agentStream, attempt, maxAttempts });
2402
+ const first = await invokeAgentOnce(agentInput, "agent");
2403
+ agentOutput = first.output;
2404
+ agentDuration = first.durationSeconds;
2405
+ await endPhase("agent", { agentDurationSeconds: agentDuration, attempt });
2406
+ if (!(await refreshLock("agent"))) {
2407
+ await this.logTask(taskRun.id, "Aborting task: lock lost after agent completion.", "vcs");
2408
+ throw new Error("Task lock lost after agent completion.");
2409
+ }
1472
2410
  }
1473
2411
  catch (error) {
1474
2412
  const message = error instanceof Error ? error.message : String(error);
1475
- await this.logTask(taskRun.id, `Agent retry failed: ${message}`, "agent");
2413
+ if (/task lock lost/i.test(message)) {
2414
+ throw error;
2415
+ }
2416
+ await this.logTask(taskRun.id, `Agent invocation failed: ${message}`, "agent");
2417
+ await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "agent", "error", { error: message, attempt });
2418
+ await this.deps.workspaceRepo.updateTaskRun(taskRun.id, {
2419
+ status: "failed",
2420
+ finishedAt: new Date().toISOString(),
2421
+ });
2422
+ results.push({ taskKey: task.task.key, status: "failed", notes: message });
2423
+ taskStatus = "failed";
2424
+ await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
2425
+ continue taskLoop;
1476
2426
  }
1477
- }
1478
- if (patches.length === 0 && fileBlocks.length === 0) {
1479
- const message = "Agent output did not include a patch or file blocks.";
1480
- await this.logTask(taskRun.id, message, "agent");
1481
- await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
1482
- await this.stateService.markBlocked(task.task, "missing_patch");
1483
- results.push({ taskKey: task.task.key, status: "failed", notes: "missing_patch" });
1484
- taskStatus = "failed";
1485
- await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
1486
- continue;
1487
- }
1488
- if (patches.length || fileBlocks.length) {
1489
- if (!(await refreshLock("apply_start", true))) {
1490
- await this.logTask(taskRun.id, "Aborting task: lock lost before apply.", "vcs");
1491
- throw new Error("Task lock lost before apply.");
2427
+ await recordUsage("agent", agentOutput, agentDuration, agentInput);
2428
+ let { patches, fileBlocks, jsonDetected } = extractAgentChanges(agentOutput);
2429
+ if (fileBlocks.length && patches.length) {
2430
+ const { existing, remaining } = splitFileBlocksByExistence(fileBlocks, this.workspace.workspaceRoot);
2431
+ if (existing.length) {
2432
+ await this.logTask(taskRun.id, `Skipped FILE blocks for existing files: ${existing.join(", ")}`, "agent");
2433
+ }
2434
+ fileBlocks = remaining;
1492
2435
  }
1493
- const applyDetails = {};
1494
- if (patches.length)
1495
- applyDetails.patchCount = patches.length;
1496
- if (fileBlocks.length)
1497
- applyDetails.fileCount = fileBlocks.length;
1498
- if (fileBlocks.length && !patches.length)
1499
- applyDetails.mode = "direct";
1500
- await startPhase("apply", applyDetails);
1501
- let patchApplyError = null;
1502
- if (patches.length) {
1503
- const applied = await this.applyPatches(patches, this.workspace.workspaceRoot, request.dryRun ?? false);
1504
- touched = applied.touched;
1505
- if (applied.warnings?.length) {
1506
- await this.logTask(taskRun.id, applied.warnings.join("; "), "patch");
2436
+ if (patches.length === 0 && fileBlocks.length === 0 && !triedRetry) {
2437
+ triedRetry = true;
2438
+ const retryReason = jsonDetected
2439
+ ? "Agent output was JSON-only and did not include patch or file blocks; retrying with explicit output instructions."
2440
+ : "Agent output did not include a patch or file blocks; retrying with explicit output instructions.";
2441
+ await this.logTask(taskRun.id, retryReason, "agent");
2442
+ try {
2443
+ const retryInput = `${systemPrompt}\n\n${attemptPrompt}\n\nOutput only code changes. If editing existing files, output a unified diff inside \`\`\`patch\`\`\` fences. If creating new files, output FILE blocks in this format:\nFILE: path/to/file.ext\n\`\`\`\n<full file contents>\n\`\`\`\nDo not include analysis or narration. Do not output JSON unless the runtime forces it; if forced, return a top-level JSON object with either a \`patch\` string (unified diff) or a \`files\` array of {path, content}.`;
2444
+ const retry = await invokeAgentOnce(retryInput, "agent");
2445
+ agentOutput = retry.output;
2446
+ agentDuration += retry.durationSeconds;
2447
+ await recordUsage("agent_retry", retry.output, retry.durationSeconds, retryInput);
2448
+ ({ patches, fileBlocks, jsonDetected } = extractAgentChanges(agentOutput));
2449
+ if (fileBlocks.length && patches.length) {
2450
+ const { existing, remaining } = splitFileBlocksByExistence(fileBlocks, this.workspace.workspaceRoot);
2451
+ if (existing.length) {
2452
+ await this.logTask(taskRun.id, `Skipped FILE blocks for existing files: ${existing.join(", ")}`, "agent");
2453
+ }
2454
+ fileBlocks = remaining;
2455
+ }
2456
+ }
2457
+ catch (error) {
2458
+ const message = error instanceof Error ? error.message : String(error);
2459
+ await this.logTask(taskRun.id, `Agent retry failed: ${message}`, "agent");
2460
+ }
2461
+ }
2462
+ if (patches.length === 0 && fileBlocks.length === 0) {
2463
+ const message = "Agent output did not include a patch or file blocks.";
2464
+ await this.logTask(taskRun.id, message, "agent");
2465
+ await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
2466
+ await this.stateService.markBlocked(task.task, "missing_patch");
2467
+ results.push({ taskKey: task.task.key, status: "failed", notes: "missing_patch" });
2468
+ taskStatus = "failed";
2469
+ await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
2470
+ continue taskLoop;
2471
+ }
2472
+ if (patches.length || fileBlocks.length) {
2473
+ if (!(await refreshLock("apply_start", true))) {
2474
+ await this.logTask(taskRun.id, "Aborting task: lock lost before apply.", "vcs");
2475
+ throw new Error("Task lock lost before apply.");
2476
+ }
2477
+ const applyDetails = { attempt };
2478
+ if (patches.length)
2479
+ applyDetails.patchCount = patches.length;
2480
+ if (fileBlocks.length)
2481
+ applyDetails.fileCount = fileBlocks.length;
2482
+ if (fileBlocks.length && !patches.length)
2483
+ applyDetails.mode = "direct";
2484
+ await startPhase("apply", applyDetails);
2485
+ let patchApplyError = null;
2486
+ if (patches.length) {
2487
+ const applied = await this.applyPatches(patches, this.workspace.workspaceRoot, request.dryRun ?? false);
2488
+ if (applied.touched.length) {
2489
+ const merged = new Set([...touched, ...applied.touched]);
2490
+ touched = Array.from(merged);
2491
+ }
2492
+ if (applied.warnings?.length) {
2493
+ await this.logTask(taskRun.id, applied.warnings.join("; "), "patch");
2494
+ }
2495
+ if (applied.error) {
2496
+ patchApplyError = applied.error;
2497
+ await this.logTask(taskRun.id, `Patch apply failed: ${applied.error}`, "patch");
2498
+ if (!fileBlocks.length && !triedPatchFallback) {
2499
+ triedPatchFallback = true;
2500
+ const files = Array.from(new Set(patches.flatMap((patch) => touchedFilesFromPatch(patch)))).filter(Boolean);
2501
+ if (files.length) {
2502
+ const fallbackPrompt = [
2503
+ systemPrompt,
2504
+ "",
2505
+ attemptPrompt,
2506
+ "",
2507
+ `Patch apply failed (${applied.error}).`,
2508
+ "Return FILE blocks only for these paths (full contents, no diffs, no prose):",
2509
+ files.map((file) => `- ${file}`).join("\n"),
2510
+ ].join("\n");
2511
+ try {
2512
+ const fallback = await invokeAgentOnce(fallbackPrompt, "agent");
2513
+ agentDuration += fallback.durationSeconds;
2514
+ await recordUsage("agent_retry", fallback.output, fallback.durationSeconds, fallbackPrompt);
2515
+ const fallbackChanges = extractAgentChanges(fallback.output);
2516
+ if (!fallbackChanges.fileBlocks.length && !fallbackChanges.patches.length && files.length === 1) {
2517
+ const inferred = extractPlainCodeFence(fallback.output);
2518
+ if (inferred) {
2519
+ fallbackChanges.fileBlocks = [{ path: files[0], content: inferred }];
2520
+ }
2521
+ }
2522
+ if (fallbackChanges.fileBlocks.length) {
2523
+ fileBlocks = fallbackChanges.fileBlocks;
2524
+ patches = [];
2525
+ patchApplyError = null;
2526
+ await this.logTask(taskRun.id, "Recovered from patch failure using FILE blocks.", "patch");
2527
+ }
2528
+ }
2529
+ catch (error) {
2530
+ const message = error instanceof Error ? error.message : String(error);
2531
+ await this.logTask(taskRun.id, `Patch fallback failed: ${message}`, "patch");
2532
+ }
2533
+ }
2534
+ }
2535
+ if (patchApplyError && !fileBlocks.length) {
2536
+ await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "apply", "error", { error: applied.error, attempt });
2537
+ await this.stateService.markBlocked(task.task, "patch_failed");
2538
+ await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
2539
+ results.push({ taskKey: task.task.key, status: "failed", notes: "patch_failed" });
2540
+ taskStatus = "failed";
2541
+ if (!request.dryRun && request.noCommit !== true) {
2542
+ await this.commitPendingChanges(branchInfo, task.task.key, task.task.title, "auto-save (patch_failed)", task.task.id, taskRun.id);
2543
+ }
2544
+ await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
2545
+ continue taskLoop;
2546
+ }
2547
+ }
1507
2548
  }
1508
- if (applied.error) {
1509
- patchApplyError = applied.error;
1510
- await this.logTask(taskRun.id, `Patch apply failed: ${applied.error}`, "patch");
1511
- if (!fileBlocks.length) {
1512
- await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "apply", "error", { error: applied.error });
2549
+ if (fileBlocks.length) {
2550
+ const onlyExistingFileBlocks = fileBlocks.length > 0 &&
2551
+ fileBlocks.every((block) => {
2552
+ const rawPath = block.path?.trim();
2553
+ if (!rawPath)
2554
+ return false;
2555
+ const resolved = path.resolve(this.workspace.workspaceRoot, rawPath);
2556
+ const relative = path.relative(this.workspace.workspaceRoot, resolved);
2557
+ if (relative.startsWith("..") || path.isAbsolute(relative))
2558
+ return false;
2559
+ return fs.existsSync(resolved);
2560
+ });
2561
+ const allowNoop = patchApplyError === null && (touched.length > 0 || onlyExistingFileBlocks);
2562
+ const allowFileOverwrite = request.allowFileOverwrite === true && patches.length === 0;
2563
+ const applied = await this.applyFileBlocks(fileBlocks, this.workspace.workspaceRoot, request.dryRun ?? false, allowNoop, allowFileOverwrite);
2564
+ if (applied.touched.length) {
2565
+ const merged = new Set([...touched, ...applied.touched]);
2566
+ touched = Array.from(merged);
2567
+ }
2568
+ if (applied.warnings?.length) {
2569
+ await this.logTask(taskRun.id, applied.warnings.join("; "), "patch");
2570
+ }
2571
+ if (applied.error) {
2572
+ await this.logTask(taskRun.id, `Direct file apply failed: ${applied.error}`, "patch");
2573
+ await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "apply", "error", { error: applied.error, attempt });
1513
2574
  await this.stateService.markBlocked(task.task, "patch_failed");
1514
2575
  await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
1515
2576
  results.push({ taskKey: task.task.key, status: "failed", notes: "patch_failed" });
@@ -1518,23 +2579,15 @@ export class WorkOnTasksService {
1518
2579
  await this.commitPendingChanges(branchInfo, task.task.key, task.task.title, "auto-save (patch_failed)", task.task.id, taskRun.id);
1519
2580
  }
1520
2581
  await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
1521
- continue;
2582
+ continue taskLoop;
2583
+ }
2584
+ if (patchApplyError && applied.appliedCount > 0) {
2585
+ await this.logTask(taskRun.id, `Patch apply skipped; continued with file blocks. Reason: ${patchApplyError}`, "patch");
2586
+ patchApplyError = null;
1522
2587
  }
1523
2588
  }
1524
- }
1525
- if (fileBlocks.length) {
1526
- const allowNoop = patchApplyError === null && touched.length > 0;
1527
- const applied = await this.applyFileBlocks(fileBlocks, this.workspace.workspaceRoot, request.dryRun ?? false, allowNoop);
1528
- if (applied.touched.length) {
1529
- const merged = new Set([...touched, ...applied.touched]);
1530
- touched = Array.from(merged);
1531
- }
1532
- if (applied.warnings?.length) {
1533
- await this.logTask(taskRun.id, applied.warnings.join("; "), "patch");
1534
- }
1535
- if (applied.error) {
1536
- await this.logTask(taskRun.id, `Direct file apply failed: ${applied.error}`, "patch");
1537
- await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "apply", "error", { error: applied.error });
2589
+ if (patchApplyError) {
2590
+ await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "apply", "error", { error: patchApplyError, attempt });
1538
2591
  await this.stateService.markBlocked(task.task, "patch_failed");
1539
2592
  await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
1540
2593
  results.push({ taskKey: task.task.key, status: "failed", notes: "patch_failed" });
@@ -1543,63 +2596,192 @@ export class WorkOnTasksService {
1543
2596
  await this.commitPendingChanges(branchInfo, task.task.key, task.task.title, "auto-save (patch_failed)", task.task.id, taskRun.id);
1544
2597
  }
1545
2598
  await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
1546
- continue;
2599
+ continue taskLoop;
1547
2600
  }
1548
- if (patchApplyError && applied.appliedCount > 0) {
1549
- await this.logTask(taskRun.id, `Patch apply skipped; continued with file blocks. Reason: ${patchApplyError}`, "patch");
1550
- patchApplyError = null;
2601
+ patchApplied = patchApplied || touched.length > 0;
2602
+ await endPhase("apply", { touched, attempt });
2603
+ if (!(await refreshLock("apply"))) {
2604
+ await this.logTask(taskRun.id, "Aborting task: lock lost after apply.", "vcs");
2605
+ throw new Error("Task lock lost after apply.");
1551
2606
  }
1552
2607
  }
1553
- if (patchApplyError) {
1554
- await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "apply", "error", { error: patchApplyError });
1555
- await this.stateService.markBlocked(task.task, "patch_failed");
1556
- await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
1557
- results.push({ taskKey: task.task.key, status: "failed", notes: "patch_failed" });
1558
- taskStatus = "failed";
1559
- if (!request.dryRun && request.noCommit !== true) {
1560
- await this.commitPendingChanges(branchInfo, task.task.key, task.task.title, "auto-save (patch_failed)", task.task.id, taskRun.id);
2608
+ if (patchApplied && allowedFiles.length) {
2609
+ const dirtyAfterApply = (await this.vcs.dirtyPaths(this.workspace.workspaceRoot)).filter((p) => !p.startsWith(".mcoda"));
2610
+ const scopeCheck = this.validateScope(allowedFiles, normalizePaths(this.workspace.workspaceRoot, dirtyAfterApply));
2611
+ if (!scopeCheck.ok) {
2612
+ await this.logTask(taskRun.id, scopeCheck.message ?? "Scope violation", "scope");
2613
+ await this.stateService.markBlocked(task.task, "scope_violation");
2614
+ await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
2615
+ results.push({ taskKey: task.task.key, status: "failed", notes: "scope_violation" });
2616
+ taskStatus = "failed";
2617
+ if (!request.dryRun && request.noCommit !== true && patchApplied) {
2618
+ await this.commitPendingChanges(branchInfo, task.task.key, task.task.title, "auto-save (scope_violation)", task.task.id, taskRun.id);
2619
+ }
2620
+ await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
2621
+ continue taskLoop;
1561
2622
  }
1562
- await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
1563
- continue;
1564
2623
  }
1565
- patchApplied = touched.length > 0;
1566
- await endPhase("apply", { touched });
1567
- if (!(await refreshLock("apply"))) {
1568
- await this.logTask(taskRun.id, "Aborting task: lock lost after apply.", "vcs");
1569
- throw new Error("Task lock lost after apply.");
2624
+ if (shouldRunTests) {
2625
+ abortIfSignaled();
2626
+ testAttemptCount += 1;
2627
+ const runAllTestsCommand = detectRunAllTestsCommand(this.workspace.workspaceRoot);
2628
+ if (!runAllTestsCommand) {
2629
+ const expectedCommand = `${resolveNodeCommand()} tests/all.js`;
2630
+ lastTestResults = [
2631
+ {
2632
+ command: expectedCommand,
2633
+ stdout: "",
2634
+ stderr: "Run-all tests script missing (tests/all.js).",
2635
+ code: 1,
2636
+ },
2637
+ ];
2638
+ lastTestFailureSummary = formatTestFailureSummary(lastTestResults);
2639
+ lastTestErrorType = "tests_not_configured";
2640
+ await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "tests", "error", {
2641
+ error: "tests_not_configured",
2642
+ attempt,
2643
+ });
2644
+ await this.logTask(taskRun.id, "Run-all tests script missing; retrying with fixes.", "tests", {
2645
+ attempt,
2646
+ remainingAttempts: maxAttempts - attempt,
2647
+ });
2648
+ await endPhase("tests", { results: lastTestResults, ok: false, attempt, retrying: attempt < maxAttempts });
2649
+ if (!(await refreshLock("tests"))) {
2650
+ await this.logTask(taskRun.id, "Aborting task: lock lost after tests.", "vcs");
2651
+ throw new Error("Task lock lost after tests.");
2652
+ }
2653
+ if (attempt < maxAttempts) {
2654
+ continue;
2655
+ }
2656
+ testsPassed = false;
2657
+ break;
2658
+ }
2659
+ const combinedCommands = [...testCommands, runAllTestsCommand];
2660
+ await startPhase("tests", { commands: combinedCommands, attempt, runAll: true });
2661
+ const testResult = await this.runTests(combinedCommands, this.workspace.workspaceRoot, abortSignal);
2662
+ await this.logTask(taskRun.id, "Test results", "tests", { results: testResult.results, attempt });
2663
+ if (!testResult.ok) {
2664
+ lastTestResults = testResult.results;
2665
+ lastTestFailureSummary = formatTestFailureSummary(testResult.results);
2666
+ lastTestErrorType = "tests_failed";
2667
+ await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "tests", "error", {
2668
+ error: "tests_failed",
2669
+ attempt,
2670
+ });
2671
+ await this.logTask(taskRun.id, "Tests failed; retrying with fixes.", "tests", {
2672
+ attempt,
2673
+ remainingAttempts: maxAttempts - attempt,
2674
+ });
2675
+ await endPhase("tests", { results: testResult.results, ok: false, attempt, retrying: attempt < maxAttempts });
2676
+ if (!(await refreshLock("tests"))) {
2677
+ await this.logTask(taskRun.id, "Aborting task: lock lost after tests.", "vcs");
2678
+ throw new Error("Task lock lost after tests.");
2679
+ }
2680
+ if (attempt < maxAttempts) {
2681
+ continue;
2682
+ }
2683
+ testsPassed = false;
2684
+ break;
2685
+ }
2686
+ await endPhase("tests", { results: testResult.results, ok: true, attempt });
2687
+ testsPassed = true;
2688
+ if (!(await refreshLock("tests"))) {
2689
+ await this.logTask(taskRun.id, "Aborting task: lock lost after tests.", "vcs");
2690
+ throw new Error("Task lock lost after tests.");
2691
+ }
2692
+ }
2693
+ else {
2694
+ testsPassed = true;
2695
+ }
2696
+ if (testsPassed) {
2697
+ break;
1570
2698
  }
1571
2699
  }
1572
- if (patchApplied && allowedFiles.length) {
1573
- const dirtyAfterApply = (await this.vcs.dirtyPaths(this.workspace.workspaceRoot)).filter((p) => !p.startsWith(".mcoda"));
1574
- const scopeCheck = this.validateScope(allowedFiles, normalizePaths(this.workspace.workspaceRoot, dirtyAfterApply));
1575
- if (!scopeCheck.ok) {
1576
- await this.logTask(taskRun.id, scopeCheck.message ?? "Scope violation", "scope");
1577
- await this.stateService.markBlocked(task.task, "scope_violation");
1578
- await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
1579
- results.push({ taskKey: task.task.key, status: "failed", notes: "scope_violation" });
1580
- taskStatus = "failed";
1581
- if (!request.dryRun && request.noCommit !== true && patchApplied) {
1582
- await this.commitPendingChanges(branchInfo, task.task.key, task.task.title, "auto-save (scope_violation)", task.task.id, taskRun.id);
1583
- }
1584
- await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
1585
- continue;
2700
+ if (!testsPassed) {
2701
+ const failureReason = lastTestErrorType ?? "tests_failed";
2702
+ await this.logTask(taskRun.id, `Tests failed after ${testAttemptCount} attempt(s).`, "tests", {
2703
+ results: lastTestResults,
2704
+ });
2705
+ await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "tests", "error", {
2706
+ error: failureReason,
2707
+ attempts: testAttemptCount,
2708
+ });
2709
+ await this.stateService.markBlocked(task.task, failureReason);
2710
+ await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
2711
+ results.push({ taskKey: task.task.key, status: "failed", notes: failureReason });
2712
+ taskStatus = "failed";
2713
+ if (!request.dryRun && request.noCommit !== true) {
2714
+ await this.commitPendingChanges(branchInfo, task.task.key, task.task.title, "auto-save (tests_failed)", task.task.id, taskRun.id);
1586
2715
  }
2716
+ await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
2717
+ continue taskLoop;
1587
2718
  }
1588
- if (!request.dryRun && testCommands.length && patchApplied) {
1589
- await startPhase("tests", { commands: testCommands });
1590
- const testResult = await this.runTests(testCommands, this.workspace.workspaceRoot);
1591
- await this.logTask(taskRun.id, "Test results", "tests", { results: testResult.results });
1592
- if (!testResult.ok) {
1593
- await this.logTask(taskRun.id, "Tests failed; continuing task run with warnings.", "tests");
1594
- softFailures.push("tests_failed");
1595
- await endPhase("tests", { results: testResult.results, warning: "tests_failed" });
2719
+ if (!request.dryRun) {
2720
+ let hasChanges = touched.length > 0;
2721
+ if (!hasChanges) {
2722
+ try {
2723
+ const dirty = (await this.vcs.dirtyPaths(this.workspace.workspaceRoot)).filter((p) => !p.startsWith(".mcoda"));
2724
+ hasChanges = dirty.length > 0;
2725
+ }
2726
+ catch {
2727
+ hasChanges = false;
2728
+ }
1596
2729
  }
1597
- else {
1598
- await endPhase("tests", { results: testResult.results });
2730
+ if (!hasChanges && unresolvedComments.length > 0) {
2731
+ const openSlugs = unresolvedComments
2732
+ .map((comment) => comment.slug)
2733
+ .filter((slug) => Boolean(slug && slug.trim()));
2734
+ const slugList = openSlugs.length ? openSlugs.join(", ") : "untracked";
2735
+ const body = [
2736
+ "[work-on-tasks]",
2737
+ "No changes detected while unresolved review/QA comments remain.",
2738
+ `Open comment slugs: ${slugList}`,
2739
+ ].join("\n");
2740
+ await this.deps.workspaceRepo.createTaskComment({
2741
+ taskId: task.task.id,
2742
+ taskRunId: taskRun.id,
2743
+ jobId: job.id,
2744
+ sourceCommand: "work-on-tasks",
2745
+ authorType: "agent",
2746
+ authorAgentId: agent.id,
2747
+ category: "comment_backlog",
2748
+ body,
2749
+ createdAt: new Date().toISOString(),
2750
+ metadata: { reason: "no_changes", openSlugs },
2751
+ });
2752
+ await this.logTask(taskRun.id, `No changes detected; unresolved comments remain (${slugList}).`, "execution");
2753
+ await this.stateService.markBlocked(task.task, "no_changes");
2754
+ await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
2755
+ results.push({ taskKey: task.task.key, status: "failed", notes: "no_changes" });
2756
+ taskStatus = "failed";
2757
+ await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
2758
+ continue taskLoop;
1599
2759
  }
1600
- if (!(await refreshLock("tests"))) {
1601
- await this.logTask(taskRun.id, "Aborting task: lock lost after tests.", "vcs");
1602
- throw new Error("Task lock lost after tests.");
2760
+ if (!hasChanges) {
2761
+ const body = [
2762
+ "[work-on-tasks]",
2763
+ "No changes were applied for this task run.",
2764
+ "Re-run with a stronger agent or clarify the task requirements.",
2765
+ ].join("\n");
2766
+ await this.deps.workspaceRepo.createTaskComment({
2767
+ taskId: task.task.id,
2768
+ taskRunId: taskRun.id,
2769
+ jobId: job.id,
2770
+ sourceCommand: "work-on-tasks",
2771
+ authorType: "agent",
2772
+ authorAgentId: agent.id,
2773
+ category: "no_changes",
2774
+ body,
2775
+ createdAt: new Date().toISOString(),
2776
+ metadata: { reason: "no_changes", initialStatus },
2777
+ });
2778
+ await this.logTask(taskRun.id, "No changes detected; blocking task for escalation.", "execution");
2779
+ await this.stateService.markBlocked(task.task, "no_changes");
2780
+ await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
2781
+ results.push({ taskKey: task.task.key, status: "failed", notes: "no_changes" });
2782
+ taskStatus = "failed";
2783
+ await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
2784
+ continue taskLoop;
1603
2785
  }
1604
2786
  }
1605
2787
  if (!request.dryRun && request.noCommit !== true) {
@@ -1666,59 +2848,96 @@ export class WorkOnTasksService {
1666
2848
  else {
1667
2849
  await this.logTask(taskRun.id, "No changes to commit.", "vcs");
1668
2850
  }
1669
- // Always merge back into base and end on base branch.
1670
- try {
1671
- await this.vcs.merge(this.workspace.workspaceRoot, branchInfo.branch, branchInfo.base);
1672
- mergeStatus = "merged";
2851
+ const restrictAutoMergeWithoutScope = Boolean(this.workspace.config?.restrictAutoMergeWithoutScope);
2852
+ const shouldSkipAutoMerge = !autoMerge || (restrictAutoMergeWithoutScope && allowedFiles.length === 0);
2853
+ if (shouldSkipAutoMerge) {
2854
+ mergeStatus = "skipped";
2855
+ const changedFiles = dirty.length ? dirty : touched.length ? touched : [];
2856
+ const changedNote = changedFiles.length ? `Changed files: ${changedFiles.join(", ")}` : "No changed files detected.";
2857
+ const reason = !autoMerge ? "auto_merge_disabled" : "no_file_scope";
2858
+ const message = !autoMerge
2859
+ ? `Auto-merge disabled; leaving branch ${branchInfo.branch} for manual PR. ${changedNote}`
2860
+ : `Auto-merge skipped because task has no file scope (metadata.files empty). ${changedNote}`;
2861
+ await this.logTask(taskRun.id, message, "vcs", { reason, changedFiles });
2862
+ await this.vcs.checkoutBranch(this.workspace.workspaceRoot, branchInfo.base);
2863
+ }
2864
+ else {
2865
+ // Always merge back into base and end on base branch.
1673
2866
  try {
1674
- headSha = await this.vcs.lastCommitSha(this.workspace.workspaceRoot);
1675
- }
1676
- catch {
1677
- // Best-effort head capture.
1678
- }
1679
- await this.logTask(taskRun.id, `Merged ${branchInfo.branch} into ${branchInfo.base}`, "vcs");
1680
- if (!(await refreshLock("vcs_merge"))) {
1681
- await this.logTask(taskRun.id, "Aborting task: lock lost after merge.", "vcs");
1682
- throw new Error("Task lock lost after merge.");
2867
+ await this.vcs.merge(this.workspace.workspaceRoot, branchInfo.branch, branchInfo.base);
2868
+ mergeStatus = "merged";
2869
+ try {
2870
+ headSha = await this.vcs.lastCommitSha(this.workspace.workspaceRoot);
2871
+ }
2872
+ catch {
2873
+ // Best-effort head capture.
2874
+ }
2875
+ await this.logTask(taskRun.id, `Merged ${branchInfo.branch} into ${branchInfo.base}`, "vcs");
2876
+ if (!(await refreshLock("vcs_merge"))) {
2877
+ await this.logTask(taskRun.id, "Aborting task: lock lost after merge.", "vcs");
2878
+ throw new Error("Task lock lost after merge.");
2879
+ }
1683
2880
  }
1684
- }
1685
- catch (error) {
1686
- mergeStatus = "failed";
1687
- const conflicts = await this.vcs.conflictPaths(this.workspace.workspaceRoot);
1688
- if (conflicts.length) {
1689
- await this.logTask(taskRun.id, `Merge conflicts while merging ${branchInfo.branch} into ${branchInfo.base}.`, "vcs", {
1690
- conflicts,
1691
- });
1692
- throw new Error(`Merge conflict(s) while merging ${branchInfo.branch} into ${branchInfo.base}: ${conflicts.join(", ")}`);
2881
+ catch (error) {
2882
+ mergeStatus = "failed";
2883
+ const conflicts = await this.vcs.conflictPaths(this.workspace.workspaceRoot);
2884
+ if (conflicts.length) {
2885
+ await this.logTask(taskRun.id, `Merge conflicts while merging ${branchInfo.branch} into ${branchInfo.base}.`, "vcs", {
2886
+ conflicts,
2887
+ });
2888
+ await this.vcs.abortMerge(this.workspace.workspaceRoot);
2889
+ await this.stateService.markBlocked(task.task, "merge_conflict");
2890
+ await this.deps.workspaceRepo.updateTaskRun(taskRun.id, {
2891
+ status: "failed",
2892
+ finishedAt: new Date().toISOString(),
2893
+ });
2894
+ results.push({ taskKey: task.task.key, status: "failed", notes: "merge_conflict" });
2895
+ taskStatus = "failed";
2896
+ await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
2897
+ continue taskLoop;
2898
+ }
2899
+ throw error;
1693
2900
  }
1694
- throw error;
1695
2901
  }
1696
2902
  if (await this.vcs.hasRemote(this.workspace.workspaceRoot)) {
1697
- const branchPush = await this.pushWithRecovery(taskRun.id, branchInfo.branch);
1698
- if (branchPush.pushed) {
1699
- await this.logTask(taskRun.id, "Pushed branch to remote origin", "vcs");
1700
- }
1701
- else if (branchPush.skipped) {
1702
- await this.logTask(taskRun.id, "Skipped pushing branch to remote origin due to permissions/protection.", "vcs");
1703
- }
1704
- if (!(await refreshLock("vcs_push_branch"))) {
1705
- await this.logTask(taskRun.id, "Aborting task: lock lost after pushing branch.", "vcs");
1706
- throw new Error("Task lock lost after pushing branch.");
1707
- }
1708
- const basePush = await this.pushWithRecovery(taskRun.id, branchInfo.base);
1709
- if (basePush.pushed) {
1710
- await this.logTask(taskRun.id, `Pushed base branch ${branchInfo.base} to remote origin`, "vcs");
2903
+ if (!autoPush) {
2904
+ await this.logTask(taskRun.id, `Auto-push disabled; skipping remote push for ${branchInfo.branch} and ${branchInfo.base}.`, "vcs", { reason: "auto_push_disabled" });
1711
2905
  }
1712
- else if (basePush.skipped) {
1713
- await this.logTask(taskRun.id, `Skipped pushing base branch ${branchInfo.base} due to permissions/protection.`, "vcs");
1714
- }
1715
- if (!(await refreshLock("vcs_push_base"))) {
1716
- await this.logTask(taskRun.id, "Aborting task: lock lost after pushing base branch.", "vcs");
1717
- throw new Error("Task lock lost after pushing base branch.");
2906
+ else {
2907
+ const branchPush = await this.pushWithRecovery(taskRun.id, branchInfo.branch);
2908
+ if (branchPush.pushed) {
2909
+ await this.logTask(taskRun.id, "Pushed branch to remote origin", "vcs");
2910
+ }
2911
+ else if (branchPush.skipped) {
2912
+ await this.logTask(taskRun.id, "Skipped pushing branch to remote origin due to permissions/protection.", "vcs");
2913
+ }
2914
+ if (!(await refreshLock("vcs_push_branch"))) {
2915
+ await this.logTask(taskRun.id, "Aborting task: lock lost after pushing branch.", "vcs");
2916
+ throw new Error("Task lock lost after pushing branch.");
2917
+ }
2918
+ if (mergeStatus === "merged") {
2919
+ const basePush = await this.pushWithRecovery(taskRun.id, branchInfo.base);
2920
+ if (basePush.pushed) {
2921
+ await this.logTask(taskRun.id, `Pushed base branch ${branchInfo.base} to remote origin`, "vcs");
2922
+ }
2923
+ else if (basePush.skipped) {
2924
+ await this.logTask(taskRun.id, `Skipped pushing base branch ${branchInfo.base} due to permissions/protection.`, "vcs");
2925
+ }
2926
+ if (!(await refreshLock("vcs_push_base"))) {
2927
+ await this.logTask(taskRun.id, "Aborting task: lock lost after pushing base branch.", "vcs");
2928
+ throw new Error("Task lock lost after pushing base branch.");
2929
+ }
2930
+ }
2931
+ else {
2932
+ await this.logTask(taskRun.id, `Skipped pushing base branch ${branchInfo.base} because auto-merge was skipped.`, "vcs");
2933
+ }
1718
2934
  }
1719
2935
  }
1720
2936
  else {
1721
- await this.logTask(taskRun.id, "No remote configured; merge completed locally.", "vcs");
2937
+ const message = mergeStatus === "skipped"
2938
+ ? "No remote configured; auto-merge skipped due to missing file scope."
2939
+ : "No remote configured; merge completed locally.";
2940
+ await this.logTask(taskRun.id, message, "vcs");
1722
2941
  }
1723
2942
  }
1724
2943
  catch (error) {
@@ -1733,7 +2952,7 @@ export class WorkOnTasksService {
1733
2952
  results.push({ taskKey: task.task.key, status: "failed", notes: "vcs_failed" });
1734
2953
  taskStatus = "failed";
1735
2954
  await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
1736
- continue;
2955
+ continue taskLoop;
1737
2956
  }
1738
2957
  await endPhase("vcs", { branch: branchInfo.branch, base: branchInfo.base });
1739
2958
  }
@@ -1748,8 +2967,12 @@ export class WorkOnTasksService {
1748
2967
  const elapsedSeconds = Math.max(1, (Date.parse(finishedAt) - Date.parse(startedAt)) / 1000);
1749
2968
  const spPerHour = task.task.storyPoints && task.task.storyPoints > 0 ? (task.task.storyPoints / elapsedSeconds) * 3600 : null;
1750
2969
  const reviewMetadata = { last_run: finishedAt };
1751
- if (softFailures.length) {
1752
- reviewMetadata.soft_failures = softFailures;
2970
+ if (shouldRunTests) {
2971
+ const runAllTestsCommand = detectRunAllTestsCommand(this.workspace.workspaceRoot);
2972
+ const combinedCommands = [...testCommands, ...(runAllTestsCommand ? [runAllTestsCommand] : [])];
2973
+ reviewMetadata.test_attempts = testAttemptCount;
2974
+ reviewMetadata.test_commands = combinedCommands;
2975
+ reviewMetadata.run_all_tests_command = runAllTestsCommand ?? null;
1753
2976
  }
1754
2977
  await this.stateService.markReadyToReview(task.task, reviewMetadata);
1755
2978
  await this.deps.workspaceRepo.updateTaskRun(taskRun.id, {
@@ -1761,7 +2984,7 @@ export class WorkOnTasksService {
1761
2984
  });
1762
2985
  storyPointsProcessed += task.task.storyPoints ?? 0;
1763
2986
  await endPhase("finalize", { spPerHour: spPerHour ?? undefined });
1764
- const resultNotes = softFailures.length ? `ready_to_review_with_warnings:${softFailures.join(",")}` : "ready_to_review";
2987
+ const resultNotes = "ready_to_review";
1765
2988
  taskStatus = "succeeded";
1766
2989
  results.push({
1767
2990
  taskKey: task.task.key,
@@ -1774,6 +2997,14 @@ export class WorkOnTasksService {
1774
2997
  }
1775
2998
  catch (error) {
1776
2999
  const message = error instanceof Error ? error.message : String(error);
3000
+ if (isAbortError(message)) {
3001
+ await this.logTask(taskRun.id, `Task aborted: ${message}`, "execution");
3002
+ await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
3003
+ await this.stateService.markBlocked(task.task, "agent_timeout");
3004
+ taskStatus = "failed";
3005
+ await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
3006
+ throw error;
3007
+ }
1777
3008
  if (/task lock lost/i.test(message)) {
1778
3009
  await this.logTask(taskRun.id, `Task aborted: ${message}`, "vcs");
1779
3010
  await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
@@ -1784,7 +3015,7 @@ export class WorkOnTasksService {
1784
3015
  results.push({ taskKey: task.task.key, status: "failed", notes: "task_lock_lost" });
1785
3016
  taskStatus = "failed";
1786
3017
  await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
1787
- continue;
3018
+ continue taskLoop;
1788
3019
  }
1789
3020
  throw error;
1790
3021
  }
@@ -1793,6 +3024,32 @@ export class WorkOnTasksService {
1793
3024
  if (lockAcquired) {
1794
3025
  await this.deps.workspaceRepo.releaseTaskLock(task.task.id, taskRun.id);
1795
3026
  }
3027
+ if (request.rateAgents && tokensPromptTotal + tokensCompletionTotal > 0) {
3028
+ try {
3029
+ const ratingService = this.ensureRatingService();
3030
+ await ratingService.rate({
3031
+ workspace: this.workspace,
3032
+ agentId: agent.id,
3033
+ commandName: "work-on-tasks",
3034
+ jobId: job.id,
3035
+ commandRunId: commandRun.id,
3036
+ taskId: task.task.id,
3037
+ taskKey: task.task.key,
3038
+ discipline: task.task.type ?? undefined,
3039
+ complexity: this.resolveTaskComplexity(task.task),
3040
+ });
3041
+ }
3042
+ catch (error) {
3043
+ const message = `Agent rating failed for ${task.task.key}: ${error instanceof Error ? error.message : String(error)}`;
3044
+ warnings.push(message);
3045
+ try {
3046
+ await this.logTask(taskRun.id, message, "rating");
3047
+ }
3048
+ catch {
3049
+ /* ignore rating log failures */
3050
+ }
3051
+ }
3052
+ }
1796
3053
  }
1797
3054
  }
1798
3055
  const failureCount = results.filter((r) => r.status === "failed" || r.status === "blocked").length;