@mcoda/core 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/LICENSE +21 -0
- package/README.md +9 -0
- package/dist/api/AgentsApi.d.ts +36 -0
- package/dist/api/AgentsApi.d.ts.map +1 -0
- package/dist/api/AgentsApi.js +176 -0
- package/dist/api/QaTasksApi.d.ts +8 -0
- package/dist/api/QaTasksApi.d.ts.map +1 -0
- package/dist/api/QaTasksApi.js +36 -0
- package/dist/api/TasksApi.d.ts +7 -0
- package/dist/api/TasksApi.d.ts.map +1 -0
- package/dist/api/TasksApi.js +34 -0
- package/dist/config/ConfigService.d.ts +3 -0
- package/dist/config/ConfigService.d.ts.map +1 -0
- package/dist/config/ConfigService.js +2 -0
- package/dist/domain/dependencies/Dependency.d.ts +3 -0
- package/dist/domain/dependencies/Dependency.d.ts.map +1 -0
- package/dist/domain/dependencies/Dependency.js +2 -0
- package/dist/domain/epics/Epic.d.ts +3 -0
- package/dist/domain/epics/Epic.d.ts.map +1 -0
- package/dist/domain/epics/Epic.js +2 -0
- package/dist/domain/projects/Project.d.ts +3 -0
- package/dist/domain/projects/Project.d.ts.map +1 -0
- package/dist/domain/projects/Project.js +2 -0
- package/dist/domain/tasks/Task.d.ts +3 -0
- package/dist/domain/tasks/Task.d.ts.map +1 -0
- package/dist/domain/tasks/Task.js +2 -0
- package/dist/domain/userStories/UserStory.d.ts +3 -0
- package/dist/domain/userStories/UserStory.d.ts.map +1 -0
- package/dist/domain/userStories/UserStory.js +2 -0
- package/dist/index.d.ts +28 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +27 -0
- package/dist/prompts/PdrPrompts.d.ts +4 -0
- package/dist/prompts/PdrPrompts.d.ts.map +1 -0
- package/dist/prompts/PdrPrompts.js +21 -0
- package/dist/prompts/PromptLoader.d.ts +3 -0
- package/dist/prompts/PromptLoader.d.ts.map +1 -0
- package/dist/prompts/PromptLoader.js +2 -0
- package/dist/prompts/SdsPrompts.d.ts +5 -0
- package/dist/prompts/SdsPrompts.d.ts.map +1 -0
- package/dist/prompts/SdsPrompts.js +44 -0
- package/dist/services/agents/AgentManagementService.d.ts +3 -0
- package/dist/services/agents/AgentManagementService.d.ts.map +1 -0
- package/dist/services/agents/AgentManagementService.js +2 -0
- package/dist/services/agents/GatewayAgentService.d.ts +92 -0
- package/dist/services/agents/GatewayAgentService.d.ts.map +1 -0
- package/dist/services/agents/GatewayAgentService.js +870 -0
- package/dist/services/agents/RoutingApiClient.d.ts +23 -0
- package/dist/services/agents/RoutingApiClient.d.ts.map +1 -0
- package/dist/services/agents/RoutingApiClient.js +62 -0
- package/dist/services/agents/RoutingService.d.ts +50 -0
- package/dist/services/agents/RoutingService.d.ts.map +1 -0
- package/dist/services/agents/RoutingService.js +386 -0
- package/dist/services/agents/generated/RoutingApiClient.d.ts +21 -0
- package/dist/services/agents/generated/RoutingApiClient.d.ts.map +1 -0
- package/dist/services/agents/generated/RoutingApiClient.js +68 -0
- package/dist/services/backlog/BacklogService.d.ts +98 -0
- package/dist/services/backlog/BacklogService.d.ts.map +1 -0
- package/dist/services/backlog/BacklogService.js +453 -0
- package/dist/services/backlog/TaskOrderingService.d.ts +88 -0
- package/dist/services/backlog/TaskOrderingService.d.ts.map +1 -0
- package/dist/services/backlog/TaskOrderingService.js +675 -0
- package/dist/services/docs/DocsService.d.ts +82 -0
- package/dist/services/docs/DocsService.d.ts.map +1 -0
- package/dist/services/docs/DocsService.js +1631 -0
- package/dist/services/estimate/EstimateService.d.ts +12 -0
- package/dist/services/estimate/EstimateService.d.ts.map +1 -0
- package/dist/services/estimate/EstimateService.js +103 -0
- package/dist/services/estimate/VelocityService.d.ts +19 -0
- package/dist/services/estimate/VelocityService.d.ts.map +1 -0
- package/dist/services/estimate/VelocityService.js +237 -0
- package/dist/services/estimate/types.d.ts +30 -0
- package/dist/services/estimate/types.d.ts.map +1 -0
- package/dist/services/estimate/types.js +1 -0
- package/dist/services/execution/ExecutionService.d.ts +3 -0
- package/dist/services/execution/ExecutionService.d.ts.map +1 -0
- package/dist/services/execution/ExecutionService.js +2 -0
- package/dist/services/execution/QaFollowupService.d.ts +38 -0
- package/dist/services/execution/QaFollowupService.d.ts.map +1 -0
- package/dist/services/execution/QaFollowupService.js +236 -0
- package/dist/services/execution/QaProfileService.d.ts +22 -0
- package/dist/services/execution/QaProfileService.d.ts.map +1 -0
- package/dist/services/execution/QaProfileService.js +142 -0
- package/dist/services/execution/QaTasksService.d.ts +101 -0
- package/dist/services/execution/QaTasksService.d.ts.map +1 -0
- package/dist/services/execution/QaTasksService.js +1117 -0
- package/dist/services/execution/TaskSelectionService.d.ts +50 -0
- package/dist/services/execution/TaskSelectionService.d.ts.map +1 -0
- package/dist/services/execution/TaskSelectionService.js +281 -0
- package/dist/services/execution/TaskStateService.d.ts +19 -0
- package/dist/services/execution/TaskStateService.d.ts.map +1 -0
- package/dist/services/execution/TaskStateService.js +59 -0
- package/dist/services/execution/WorkOnTasksService.d.ts +80 -0
- package/dist/services/execution/WorkOnTasksService.d.ts.map +1 -0
- package/dist/services/execution/WorkOnTasksService.js +1833 -0
- package/dist/services/jobs/JobInsightsService.d.ts +97 -0
- package/dist/services/jobs/JobInsightsService.d.ts.map +1 -0
- package/dist/services/jobs/JobInsightsService.js +263 -0
- package/dist/services/jobs/JobResumeService.d.ts +16 -0
- package/dist/services/jobs/JobResumeService.d.ts.map +1 -0
- package/dist/services/jobs/JobResumeService.js +113 -0
- package/dist/services/jobs/JobService.d.ts +149 -0
- package/dist/services/jobs/JobService.d.ts.map +1 -0
- package/dist/services/jobs/JobService.js +490 -0
- package/dist/services/jobs/JobsApiClient.d.ts +73 -0
- package/dist/services/jobs/JobsApiClient.d.ts.map +1 -0
- package/dist/services/jobs/JobsApiClient.js +67 -0
- package/dist/services/openapi/OpenApiService.d.ts +54 -0
- package/dist/services/openapi/OpenApiService.d.ts.map +1 -0
- package/dist/services/openapi/OpenApiService.js +503 -0
- package/dist/services/planning/CreateTasksService.d.ts +68 -0
- package/dist/services/planning/CreateTasksService.d.ts.map +1 -0
- package/dist/services/planning/CreateTasksService.js +989 -0
- package/dist/services/planning/KeyHelpers.d.ts +5 -0
- package/dist/services/planning/KeyHelpers.d.ts.map +1 -0
- package/dist/services/planning/KeyHelpers.js +62 -0
- package/dist/services/planning/PlanningService.d.ts +3 -0
- package/dist/services/planning/PlanningService.d.ts.map +1 -0
- package/dist/services/planning/PlanningService.js +2 -0
- package/dist/services/planning/RefineTasksService.d.ts +56 -0
- package/dist/services/planning/RefineTasksService.d.ts.map +1 -0
- package/dist/services/planning/RefineTasksService.js +1328 -0
- package/dist/services/review/CodeReviewService.d.ts +103 -0
- package/dist/services/review/CodeReviewService.d.ts.map +1 -0
- package/dist/services/review/CodeReviewService.js +1187 -0
- package/dist/services/system/SystemUpdateService.d.ts +55 -0
- package/dist/services/system/SystemUpdateService.d.ts.map +1 -0
- package/dist/services/system/SystemUpdateService.js +136 -0
- package/dist/services/tasks/TaskApiResolver.d.ts +7 -0
- package/dist/services/tasks/TaskApiResolver.d.ts.map +1 -0
- package/dist/services/tasks/TaskApiResolver.js +41 -0
- package/dist/services/tasks/TaskDetailService.d.ts +106 -0
- package/dist/services/tasks/TaskDetailService.d.ts.map +1 -0
- package/dist/services/tasks/TaskDetailService.js +332 -0
- package/dist/services/telemetry/TelemetryService.d.ts +53 -0
- package/dist/services/telemetry/TelemetryService.d.ts.map +1 -0
- package/dist/services/telemetry/TelemetryService.js +434 -0
- package/dist/workspace/WorkspaceManager.d.ts +35 -0
- package/dist/workspace/WorkspaceManager.d.ts.map +1 -0
- package/dist/workspace/WorkspaceManager.js +201 -0
- package/package.json +45 -0
|
@@ -0,0 +1,1833 @@
|
|
|
1
|
+
import { exec as execCb } from "node:child_process";
|
|
2
|
+
import { promisify } from "node:util";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import fs from "node:fs";
|
|
5
|
+
import { AgentService } from "@mcoda/agents";
|
|
6
|
+
import { DocdexClient, VcsClient } from "@mcoda/integrations";
|
|
7
|
+
import { GlobalRepository, WorkspaceRepository } from "@mcoda/db";
|
|
8
|
+
import { PathHelper } from "@mcoda/shared";
|
|
9
|
+
import { JobService } from "../jobs/JobService.js";
|
|
10
|
+
import { TaskSelectionService } from "./TaskSelectionService.js";
|
|
11
|
+
import { TaskStateService } from "./TaskStateService.js";
|
|
12
|
+
import { RoutingService } from "../agents/RoutingService.js";
|
|
13
|
+
// Promisified child_process.exec for awaiting shell commands.
const exec = promisify(execCb);
// Branch the task branches are cut from / merged back into.
const DEFAULT_BASE_BRANCH = "mcoda-dev";
// Prefix for per-task working branches (e.g. "mcoda/task/<key>").
const DEFAULT_TASK_BRANCH_PREFIX = "mcoda/task/";
// Task lock lifetime: one hour, in seconds.
const TASK_LOCK_TTL_SECONDS = 60 * 60;
|
|
17
|
+
// Fallback system prompt for the code-writing agent, used when neither the
// workspace nor the agent provides a code-writer prompt file (see loadPrompts).
const DEFAULT_CODE_WRITER_PROMPT = [
    "You are the code-writing agent. Before coding, query docdex with the task key and feature keywords (MCP `docdex_search` limit 4–8 or CLI `docdexd query --repo <repo> --query \"<term>\" --limit 6 --snippets=false`). If results look stale, reindex (`docdex_index` or `docdexd index --repo <repo>`) then re-run search. Fetch snippets via `docdex_open` or `/snippet/:doc_id?text_only=true` only for specific hits.",
    "Use docdex snippets to ground decisions (data model, offline/online expectations, constraints, acceptance criteria). Note when docdex is unavailable and fall back to local docs.",
    "Re-use existing store/slices/adapters and tests; avoid inventing new backends or ad-hoc actions. Keep behavior backward-compatible and scoped to the documented contracts.",
    "If you encounter merge conflicts, resolve them first (clean conflict markers and ensure code compiles) before continuing task work.",
    "If a target file does not exist, create it by outputting a FILE block (not a diff): `FILE: path/to/file.ext` followed by a fenced code block containing the full file contents.",
].join("\n");
|
|
24
|
+
// Fallback job-level prompt when the agent defines none (see loadPrompts).
const DEFAULT_JOB_PROMPT = "You are an mcoda agent that follows workspace runbooks and responds with actionable, concise output.";
// Fallback character/persona prompt when the agent defines none.
const DEFAULT_CHARACTER_PROMPT = "Write clearly, avoid hallucinations, cite assumptions, and prioritize risk mitigation for the user.";
|
|
26
|
+
// Rough token estimate: ~4 characters per token, never less than 1.
const estimateTokens = (text) => {
    const charCount = (text ?? "").length;
    return Math.max(1, Math.ceil(charCount / 4));
};
|
|
27
|
+
// Pulls every ```patch or ```diff fenced block out of agent output,
// returning the inner text of each (fences stripped, trimmed, empties dropped).
const extractPatches = (output) => {
    const fencedBlocks = output.match(/```(?:patch|diff)[\s\S]*?```/g) ?? [];
    const patches = [];
    for (const block of fencedBlocks) {
        const body = block
            .replace(/```(?:patch|diff)/, "")
            .replace(/```$/, "")
            .trim();
        if (body) {
            patches.push(body);
        }
    }
    return patches;
};
|
|
31
|
+
// Parses "FILE: <path>" blocks followed by a fenced code block from agent
// output, returning {path, content} records for each full-file emission.
const extractFileBlocks = (output) => {
    const pattern = /(?:^|\r?\n)FILE:\s*([^\r\n]+)\r?\n```[^\r\n]*\r?\n([\s\S]*?)\r?\n```/g;
    const blocks = [];
    for (const hit of output.matchAll(pattern)) {
        const target = hit[1]?.trim();
        if (!target) {
            continue;
        }
        blocks.push({ path: target, content: hit[2] ?? "" });
    }
    return blocks;
};
|
|
43
|
+
// Collects the distinct target paths ("+++ b/<path>" lines) of a unified diff.
const touchedFilesFromPatch = (patch) => {
    const touched = new Set();
    for (const hit of patch.matchAll(/^\+\+\+\s+b\/([^\s]+)/gm)) {
        touched.add(hit[1]);
    }
    return [...touched];
};
|
|
52
|
+
// Resolves each file path (absolute or workspace-relative) to a
// workspace-relative path with forward slashes.
const normalizePaths = (workspaceRoot, files) => files.map((file) => {
    const absolute = path.isAbsolute(file) ? file : path.join(workspaceRoot, file);
    return path.relative(workspaceRoot, absolute).replace(/\\/g, "/");
});
|
|
53
|
+
// Line appended to .gitignore so the .mcoda state directory stays untracked.
const MCODA_GITIGNORE_ENTRY = ".mcoda/\n";
// Per-job scratch directory under <workspace>/.mcoda/jobs/<jobId>/work.
const WORK_DIR = (jobId, workspaceRoot) => path.join(workspaceRoot, ".mcoda", "jobs", jobId, "work");
|
|
55
|
+
// Converts an "apply_patch" envelope (*** Begin Patch / *** Add File /
// *** Delete File / *** Update File / *** End Patch) into a git-applyable
// unified diff. Input that does not start with "*** Begin Patch" is returned
// unchanged.
//
// Fix: the Add File branch previously tested `!l.startsWith("\")` — an
// unterminated string literal (a syntax error). The intent, per the adjacent
// comment, is to skip marker lines such as `\ No newline at end of file`,
// i.e. lines starting with a backslash: `"\\"`. Also drops the unused `next`
// helper.
const maybeConvertApplyPatch = (patch) => {
    if (!patch.trimStart().startsWith("*** Begin Patch"))
        return patch;
    const lines = patch.split(/\r?\n/);
    let i = 0;
    const out = [];
    const current = () => lines[i];
    // Skips forward to the next "*** " directive (used after Delete File).
    const advanceUntilNextFile = () => {
        while (i < lines.length && !current().startsWith("*** "))
            i += 1;
    };
    while (i < lines.length) {
        const line = current();
        if (line.startsWith("*** Begin Patch") || line.startsWith("*** End Patch")) {
            i += 1;
            continue;
        }
        if (line.startsWith("*** Add File: ")) {
            const file = line.replace("*** Add File: ", "").trim();
            const content = [];
            i += 1;
            // Collect the new file's body up to the next directive.
            while (i < lines.length && !current().startsWith("*** ")) {
                const l = current();
                if (l.startsWith("+")) {
                    content.push(l.slice(1));
                }
                else if (!l.startsWith("\\")) {
                    // Some apply_patch emitters omit the leading "+", so treat raw
                    // lines as content; lines starting with "\" (e.g. "\ No newline
                    // at end of file") are markers, not file content.
                    content.push(l);
                }
                i += 1;
            }
            const count = content.length;
            out.push(`diff --git a/${file} b/${file}`);
            out.push("new file mode 100644");
            out.push("--- /dev/null");
            out.push(`+++ b/${file}`);
            if (count > 0) {
                out.push(`@@ -0,0 +1,${count} @@`);
                content.forEach((l) => out.push(`+${l}`));
            }
            else {
                out.push("@@ -0,0 +0,0 @@");
            }
            continue;
        }
        if (line.startsWith("*** Delete File: ")) {
            const file = line.replace("*** Delete File: ", "").trim();
            out.push(`diff --git a/${file} b/${file}`);
            out.push("deleted file mode 100644");
            out.push(`--- a/${file}`);
            out.push("+++ /dev/null");
            // Placeholder hunk; the deleted body is not reproduced here.
            out.push("@@ -1 +0,0 @@");
            i += 1;
            advanceUntilNextFile();
            continue;
        }
        if (line.startsWith("*** Update File: ")) {
            const file = line.replace("*** Update File: ", "").trim();
            i += 1;
            // Skip optional move line
            if (i < lines.length && current().startsWith("*** Move to: "))
                i += 1;
            out.push(`diff --git a/${file} b/${file}`);
            out.push(`--- a/${file}`);
            out.push(`+++ b/${file}`);
            // Keep only recognizable diff lines (hunk headers, +/-/context).
            while (i < lines.length && !current().startsWith("*** ")) {
                const l = current();
                if (l.startsWith("@@") || l.startsWith("+++") || l.startsWith("---") || l.startsWith("+") || l.startsWith("-") || l.startsWith(" ")) {
                    out.push(l);
                }
                i += 1;
            }
            continue;
        }
        i += 1;
    }
    return out.join("\n");
};
|
|
135
|
+
// Ensures a diff has a "diff --git a/<x> b/<y>" header (synthesized from the
// first ---/+++ pair when missing) and a "new file mode" line for additions.
// Diffs without ---/+++ markers are returned untouched.
const ensureDiffHeader = (patch) => {
    const rows = patch.split(/\r?\n/);
    const minusLine = rows.find((l) => l.startsWith("--- "));
    const plusLine = rows.find((l) => l.startsWith("+++ "));
    if (minusLine === undefined || plusLine === undefined)
        return patch;
    const oldPath = minusLine.replace(/^---\s+/, "").trim();
    const newPath = plusLine.replace(/^\+\+\+\s+/, "").trim();
    const isAdd = oldPath === "/dev/null";
    // For additions the left side comes from the new path; strip a/ and b/ prefixes.
    const left = isAdd ? newPath.replace(/^b\//, "") : oldPath.replace(/^a\//, "");
    const right = newPath.replace(/^b\//, "");
    const output = [...rows];
    if (!/^diff --git /m.test(patch)) {
        output.unshift(`diff --git a/${left} b/${right}`);
    }
    const headerAt = output.findIndex((l) => l.startsWith("diff --git "));
    const hasNewFileMode = output.some((l) => l.startsWith("new file mode"));
    if (isAdd && !hasNewFileMode) {
        output.splice(headerAt + 1, 0, "new file mode 100644");
    }
    return output.join("\n");
};
|
|
161
|
+
// Drops "index <sha>..<sha>" lines whose hashes are not well-formed
// (7-40 hex chars each); git apply rejects diffs with bogus index lines.
const stripInvalidIndexLines = (patch) => {
    const kept = [];
    for (const line of patch.split(/\r?\n/)) {
        if (line.startsWith("index ")) {
            const hashes = line.replace(/^index\s+/, "").trim();
            if (!/^[0-9a-f]{7,40}\.\.[0-9a-f]{7,40}$/.test(hashes)) {
                continue;
            }
        }
        kept.push(line);
    }
    return kept.join("\n");
};
|
|
170
|
+
// True when a patch contains placeholder filler ("???" or "rest of existing
// code") instead of real content, so it must not be applied.
const isPlaceholderPatch = (patch) => patch.includes("???") || /rest of existing code/i.test(patch);
|
|
171
|
+
// Rewrites bare "@@" hunk markers (emitted by some agents without ranges)
// into "@@ -0,<minus> +1,<plus> @@" headers by counting the hunk's body lines.
// Hunks that already carry ranges, and all non-hunk lines, pass through.
// NOTE(review): the synthesized start offset is always "-0"/"+1", which looks
// tuned for new-file hunks; strict `git apply` may still reject this for
// modification hunks — confirm against the downstream apply step.
const normalizeHunkHeaders = (patch) => {
    const lines = patch.split(/\r?\n/);
    const out = [];
    // Whether the file section being scanned is an addition (--- /dev/null).
    let currentAddFile = false;
    // Counts old-side (minus) and new-side (plus) line totals of the hunk
    // starting at `start`, stopping at the next hunk/file boundary or at the
    // first line that is not recognizable diff content.
    const countLines = (start) => {
        let minus = 0;
        let plus = 0;
        for (let j = start; j < lines.length; j += 1) {
            const l = lines[j];
            if (l.startsWith("@@") || l.startsWith("diff --git ") || l.startsWith("*** End Patch"))
                break;
            if (l.startsWith("+++ ") || l.startsWith("--- "))
                continue;
            if (l.startsWith(" ")) {
                // Context line counts on both sides.
                minus += 1;
                plus += 1;
            }
            else if (l.startsWith("-")) {
                minus += 1;
            }
            else if (l.startsWith("+")) {
                plus += 1;
            }
            else if (!l.trim()) {
                // Blank line: treated as context on both sides.
                minus += 1;
                plus += 1;
            }
            else {
                break;
            }
        }
        return { minus, plus };
    };
    for (let i = 0; i < lines.length; i += 1) {
        const line = lines[i];
        if (line.startsWith("diff --git ")) {
            currentAddFile = false;
            out.push(line);
            continue;
        }
        if (line.startsWith("--- ")) {
            currentAddFile = line.includes("/dev/null");
            out.push(line);
            continue;
        }
        if (line.startsWith("+++ ")) {
            out.push(line);
            continue;
        }
        const isHunk = line.startsWith("@@");
        const hasRanges = /^@@\s+-\d+/.test(line);
        if (isHunk && !hasRanges) {
            const { minus, plus } = countLines(i + 1);
            // New files have no old-side lines.
            const minusCount = currentAddFile ? 0 : minus;
            const plusCount = currentAddFile ? Math.max(plus, 0) : plus;
            out.push(`@@ -0,${minusCount} +1,${plusCount} @@`);
            continue;
        }
        out.push(line);
    }
    return out.join("\n");
};
|
|
233
|
+
// Inside hunks, prefixes bare non-empty lines with "+": some agents drop the
// leading "+" on added lines, which makes git apply fail. Lines already
// carrying +/-/space prefixes, boundaries, and blank lines are untouched.
const fixMissingPrefixesInHunks = (patch) => {
    const repaired = [];
    let insideHunk = false;
    for (const line of patch.split(/\r?\n/)) {
        if (line.startsWith("@@")) {
            insideHunk = true;
            repaired.push(line);
            continue;
        }
        if (insideHunk) {
            const boundary = line.startsWith("diff --git ")
                || line.startsWith("--- ")
                || line.startsWith("+++ ")
                || line.startsWith("*** End Patch");
            if (boundary) {
                insideHunk = false;
                repaired.push(line);
                continue;
            }
            if (line.trim().length && !/^[+\-\s]/.test(line)) {
                repaired.push(`+${line}`);
                continue;
            }
        }
        repaired.push(line);
    }
    return repaired.join("\n");
};
|
|
258
|
+
// Extracts the full contents of files ADDED by a unified diff (sections whose
// old side is /dev/null), keyed by the new path with the "b/" prefix removed.
const parseAddedFileContents = (patch) => {
    const collected = new Map();
    let activeFile = null;
    let addingFile = false;
    for (const line of patch.split(/\r?\n/)) {
        if (line.startsWith("diff --git ")) {
            // New file section: reset state.
            activeFile = null;
            addingFile = false;
        }
        if (line.startsWith("--- ")) {
            addingFile = line.replace(/^---\s+/, "").trim() === "/dev/null";
        }
        if (line.startsWith("+++ ") && addingFile) {
            activeFile = line.replace(/^\+\+\+\s+/, "").trim().replace(/^b\//, "");
            collected.set(activeFile, []);
        }
        if (activeFile && addingFile && line.startsWith("+") && !line.startsWith("+++")) {
            collected.get(activeFile).push(line.slice(1));
        }
    }
    return Object.fromEntries([...collected].map(([file, body]) => [file, body.join("\n")]));
};
|
|
286
|
+
// Reconciles "add file" diff sections against files that already exist on
// disk: identical content is skipped entirely; differing content has its
// section converted from an add to an update (new-file markers removed).
// Params: patch (unified diff string), existingFiles (Set of ABSOLUTE paths),
// cwd (workspace root used to resolve the diff's relative paths).
// Returns { patch: possibly rewritten diff, skipped: string[] of files whose
// on-disk content already matched }.
const updateAddPatchForExistingFile = (patch, existingFiles, cwd) => {
    const additions = parseAddedFileContents(patch);
    const skipped = [];
    let updated = patch;
    for (const file of Object.keys(additions)) {
        const absolute = path.join(cwd, file);
        if (!existingFiles.has(absolute))
            continue;
        try {
            // Synchronous read is acceptable here: small text files, per-patch.
            const content = fs.readFileSync(absolute, "utf8");
            if (content.trim() === additions[file].trim()) {
                skipped.push(file);
                continue;
            }
        }
        catch {
            // ignore read errors; fall back to converting patch
        }
        // Convert add patch to update by removing new file mode and dev/null markers.
        // NOTE(review): the `lines[i + 1]?.includes(file)` lookahead assumes the
        // line after "new file mode"/"--- /dev/null" names the target file; in a
        // standard git diff the line after "new file mode" is "--- /dev/null"
        // (no filename), so that first removal may never trigger — verify
        // against the diffs actually produced upstream.
        const lines = updated.split(/\r?\n/);
        const out = [];
        for (let i = 0; i < lines.length; i += 1) {
            const line = lines[i];
            if (line.startsWith("diff --git ")) {
                out.push(line);
                continue;
            }
            if (line.startsWith("new file mode") && lines[i + 1]?.includes(file)) {
                continue;
            }
            if (line.startsWith("--- /dev/null") && lines[i + 1]?.includes(file)) {
                out.push(`--- a/${file}`);
                continue;
            }
            out.push(line);
        }
        updated = out.join("\n");
    }
    return { patch: updated, skipped };
};
|
|
326
|
+
// Splits a multi-file unified diff into one string per "diff --git" section.
// Patches with zero or one section are returned unchanged.
//
// Fix: text preceding the first "diff --git" header (a preamble/comment) was
// previously treated as a section and wrongly re-prefixed with "diff --git ",
// corrupting it. The preamble is now kept as its own entry, unprefixed.
const splitPatchIntoDiffs = (patch) => {
    const segments = patch.split(/^diff --git /m);
    // segments[0] is whatever preceded the first header ("" when the patch
    // starts with one); the rest are the section bodies.
    const preamble = segments[0] ?? "";
    const bodies = segments.slice(1).filter(Boolean);
    if (bodies.length <= 1)
        return [patch];
    const diffs = bodies.map((part) => `diff --git ${part}`.trim());
    if (preamble.trim())
        diffs.unshift(preamble.trim());
    return diffs;
};
|
|
332
|
+
export class WorkOnTasksService {
|
|
333
|
+
async readPromptFiles(paths) {
|
|
334
|
+
const contents = [];
|
|
335
|
+
const seen = new Set();
|
|
336
|
+
for (const promptPath of paths) {
|
|
337
|
+
try {
|
|
338
|
+
const content = await fs.promises.readFile(promptPath, "utf8");
|
|
339
|
+
const trimmed = content.trim();
|
|
340
|
+
if (trimmed && !seen.has(trimmed)) {
|
|
341
|
+
contents.push(trimmed);
|
|
342
|
+
seen.add(trimmed);
|
|
343
|
+
}
|
|
344
|
+
}
|
|
345
|
+
catch {
|
|
346
|
+
/* optional prompt */
|
|
347
|
+
}
|
|
348
|
+
}
|
|
349
|
+
return contents;
|
|
350
|
+
}
|
|
351
|
+
// Wires the service to its workspace and collaborators; any service omitted
// from `deps` (selection/state/vcs) gets a default instance constructed here.
constructor(workspace, deps) {
    this.workspace = workspace;
    this.deps = deps;
    // Per-task-run monotonic log sequence counters (see nextLogSeq).
    this.taskLogSeq = new Map();
    this.selectionService = deps.selectionService ?? new TaskSelectionService(workspace, deps.workspaceRepo);
    this.stateService = deps.stateService ?? new TaskStateService(deps.workspaceRepo);
    this.vcs = deps.vcsClient ?? new VcsClient();
    this.routingService = deps.routingService;
}
|
|
360
|
+
// Resolves the job/character/command prompts for an agent. The command prompt
// is merged from .mcoda/prompts/code-writer.md, prompts/code-writer.md, and
// the agent's own "work-on-tasks" prompt; defaults fill any gaps. Also
// materializes a code-writer prompt under .mcoda if none exists yet.
async loadPrompts(agentId) {
    // Agent-specific prompts, when the agent service supports them.
    const agentPrompts = "getPrompts" in this.deps.agentService ? await this.deps.agentService.getPrompts(agentId) : undefined;
    const mcodaPromptPath = path.join(this.workspace.workspaceRoot, ".mcoda", "prompts", "code-writer.md");
    const workspacePromptPath = path.join(this.workspace.workspaceRoot, "prompts", "code-writer.md");
    try {
        await fs.promises.mkdir(path.dirname(mcodaPromptPath), { recursive: true });
        await fs.promises.access(mcodaPromptPath);
        console.info(`[work-on-tasks] using existing code-writer prompt at ${mcodaPromptPath}`);
    }
    catch {
        // No .mcoda prompt yet: copy the workspace one, or write the default.
        try {
            await fs.promises.access(workspacePromptPath);
            await fs.promises.copyFile(workspacePromptPath, mcodaPromptPath);
            console.info(`[work-on-tasks] copied code-writer prompt to ${mcodaPromptPath}`);
        }
        catch {
            console.info(`[work-on-tasks] no code-writer prompt found at ${workspacePromptPath}; writing default prompt to ${mcodaPromptPath}`);
            await fs.promises.writeFile(mcodaPromptPath, DEFAULT_CODE_WRITER_PROMPT, "utf8");
        }
    }
    const commandPromptFiles = await this.readPromptFiles([
        mcodaPromptPath,
        workspacePromptPath,
    ]);
    // Merge file prompts with the agent's command prompt; fall back to the
    // built-in default when everything else is empty.
    const mergedCommandPrompt = (() => {
        const parts = [...commandPromptFiles];
        if (agentPrompts?.commandPrompts?.["work-on-tasks"]) {
            parts.push(agentPrompts.commandPrompts["work-on-tasks"]);
        }
        if (!parts.length)
            parts.push(DEFAULT_CODE_WRITER_PROMPT);
        return parts.filter(Boolean).join("\n\n");
    })();
    return {
        jobPrompt: agentPrompts?.jobPrompt ?? DEFAULT_JOB_PROMPT,
        characterPrompt: agentPrompts?.characterPrompt ?? DEFAULT_CHARACTER_PROMPT,
        commandPrompt: mergedCommandPrompt || undefined,
    };
}
|
|
399
|
+
// Ensures the .mcoda directory exists and that .gitignore excludes it.
// NOTE(review): the catch handles both "no .gitignore" and a failed append,
// and in either case writes a file containing ONLY the .mcoda/ entry — a
// failed append would therefore replace an existing .gitignore; confirm this
// is acceptable.
async ensureMcoda() {
    await PathHelper.ensureDir(this.workspace.mcodaDir);
    const gitignorePath = path.join(this.workspace.workspaceRoot, ".gitignore");
    try {
        const content = await fs.promises.readFile(gitignorePath, "utf8");
        if (!content.includes(".mcoda/")) {
            // Append the entry, preserving existing content.
            await fs.promises.writeFile(gitignorePath, `${content.trimEnd()}\n${MCODA_GITIGNORE_ENTRY}`, "utf8");
        }
    }
    catch {
        await fs.promises.writeFile(gitignorePath, MCODA_GITIGNORE_ENTRY, "utf8");
    }
}
|
|
412
|
+
async writeWorkCheckpoint(jobId, data) {
|
|
413
|
+
const dir = WORK_DIR(jobId, this.workspace.workspaceRoot);
|
|
414
|
+
await fs.promises.mkdir(dir, { recursive: true });
|
|
415
|
+
const target = path.join(dir, "state.json");
|
|
416
|
+
await fs.promises.writeFile(target, JSON.stringify({ ...data, updatedAt: new Date().toISOString() }, null, 2), "utf8");
|
|
417
|
+
}
|
|
418
|
+
async checkpoint(jobId, stage, details) {
|
|
419
|
+
const timestamp = new Date().toISOString();
|
|
420
|
+
await this.deps.jobService.writeCheckpoint(jobId, {
|
|
421
|
+
stage,
|
|
422
|
+
timestamp,
|
|
423
|
+
details,
|
|
424
|
+
});
|
|
425
|
+
await this.writeWorkCheckpoint(jobId, { stage, details, timestamp });
|
|
426
|
+
}
|
|
427
|
+
// Async factory: builds a fully-wired WorkOnTasksService for a workspace
// (global + workspace repositories, agent/routing/job services, docdex and
// VCS clients). Use this instead of calling the constructor directly.
static async create(workspace) {
    const repo = await GlobalRepository.create();
    const agentService = new AgentService(repo);
    const routingService = await RoutingService.create();
    const docdex = new DocdexClient({
        workspaceRoot: workspace.workspaceRoot,
        // Workspace config wins; MCODA_DOCDEX_URL env var is the fallback.
        baseUrl: workspace.config?.docdexUrl ?? process.env.MCODA_DOCDEX_URL,
    });
    const workspaceRepo = await WorkspaceRepository.create(workspace.workspaceRoot);
    const jobService = new JobService(workspace, workspaceRepo);
    const selectionService = new TaskSelectionService(workspace, workspaceRepo);
    const stateService = new TaskStateService(workspaceRepo);
    const vcsClient = new VcsClient();
    return new WorkOnTasksService(workspace, {
        agentService,
        docdex,
        jobService,
        workspaceRepo,
        selectionService,
        stateService,
        repo,
        vcsClient,
        routingService,
    });
}
|
|
452
|
+
async close() {
|
|
453
|
+
const maybeClose = async (target) => {
|
|
454
|
+
try {
|
|
455
|
+
if (target?.close)
|
|
456
|
+
await target.close();
|
|
457
|
+
}
|
|
458
|
+
catch {
|
|
459
|
+
/* ignore */
|
|
460
|
+
}
|
|
461
|
+
};
|
|
462
|
+
await maybeClose(this.deps.selectionService);
|
|
463
|
+
await maybeClose(this.deps.stateService);
|
|
464
|
+
await maybeClose(this.deps.agentService);
|
|
465
|
+
await maybeClose(this.deps.jobService);
|
|
466
|
+
await maybeClose(this.deps.repo);
|
|
467
|
+
await maybeClose(this.deps.workspaceRepo);
|
|
468
|
+
await maybeClose(this.deps.routingService);
|
|
469
|
+
await maybeClose(this.deps.docdex);
|
|
470
|
+
}
|
|
471
|
+
async resolveAgent(agentName) {
|
|
472
|
+
const resolved = await this.routingService.resolveAgentForCommand({
|
|
473
|
+
workspace: this.workspace,
|
|
474
|
+
commandName: "work-on-tasks",
|
|
475
|
+
overrideAgentSlug: agentName,
|
|
476
|
+
});
|
|
477
|
+
return resolved.agent;
|
|
478
|
+
}
|
|
479
|
+
nextLogSeq(taskRunId) {
|
|
480
|
+
const next = (this.taskLogSeq.get(taskRunId) ?? 0) + 1;
|
|
481
|
+
this.taskLogSeq.set(taskRunId, next);
|
|
482
|
+
return next;
|
|
483
|
+
}
|
|
484
|
+
async logTask(taskRunId, message, source, details) {
|
|
485
|
+
await this.deps.workspaceRepo.insertTaskLog({
|
|
486
|
+
taskRunId,
|
|
487
|
+
sequence: this.nextLogSeq(taskRunId),
|
|
488
|
+
timestamp: new Date().toISOString(),
|
|
489
|
+
source: source ?? "work-on-tasks",
|
|
490
|
+
message,
|
|
491
|
+
details: details ?? undefined,
|
|
492
|
+
});
|
|
493
|
+
}
|
|
494
|
+
// Records one token-usage sample against the job service, tagging it with
// workspace/job/task identifiers and a "work-on-tasks" metadata phase.
// tokensTotal is derived as prompt + completion tokens.
async recordTokenUsage(params) {
    const total = params.tokensPrompt + params.tokensCompletion;
    await this.deps.jobService.recordTokenUsage({
        workspaceId: this.workspace.workspaceId,
        agentId: params.agentId,
        modelName: params.model,
        jobId: params.jobId,
        commandRunId: params.commandRunId,
        taskRunId: params.taskRunId,
        taskId: params.taskId,
        projectId: params.projectId,
        tokensPrompt: params.tokensPrompt,
        tokensCompletion: params.tokensCompletion,
        tokensTotal: total,
        durationSeconds: params.durationSeconds ?? null,
        timestamp: new Date().toISOString(),
        // phase doubles as the metadata action; defaults to "agent".
        metadata: { commandName: "work-on-tasks", phase: params.phase ?? "agent", action: params.phase ?? "agent" },
    });
}
|
|
513
|
+
// Transitions a task run to a new phase/status: updates the run context in
// the workspace repo, writes a task log line ("<phase>:<status>"), and
// records a job checkpoint ("task:<key>:<phase>:<status>"), all sharing the
// same payload (taskKey/phase/status plus any extra details).
async updateTaskPhase(jobId, taskRunId, taskKey, phase, status, details) {
    const payload = { taskKey, phase, status, ...(details ?? {}) };
    await this.deps.workspaceRepo.updateTaskRun(taskRunId, { runContext: { phase, status } });
    await this.logTask(taskRunId, `${phase}:${status}`, phase, payload);
    await this.checkpoint(jobId, `task:${taskKey}:${phase}:${status}`, payload);
}
|
|
519
|
+
// Builds a doc-context summary for a task: up to 5 docdex search hits for the
// project, plus excerpts of any explicitly linked documents. Docdex failures
// never throw; they are collected into `warnings` instead.
// Returns { summary: newline-joined bullet list, warnings: string[] }.
async gatherDocContext(projectKey, docLinks = []) {
    const warnings = [];
    const parts = [];
    try {
        const docs = await this.deps.docdex.search({ projectKey, profile: "workspace-code" });
        parts.push(...docs
            .slice(0, 5)
            .map((doc) => `- [${doc.docType}] ${doc.title ?? doc.path ?? doc.id}`));
    }
    catch (error) {
        warnings.push(`docdex search failed: ${error.message}`);
    }
    // Linked documents are fetched individually; failures degrade to warnings.
    for (const link of docLinks) {
        try {
            const doc = await this.deps.docdex.fetchDocumentById(link);
            // First segment only, capped at 240 chars, as a short excerpt.
            const excerpt = doc.segments?.[0]?.content?.slice(0, 240);
            parts.push(`- [linked:${doc.docType}] ${doc.title ?? doc.id}${excerpt ? ` — ${excerpt}` : ""}`);
        }
        catch (error) {
            warnings.push(`docdex fetch failed for ${link}: ${error.message}`);
        }
    }
    const summary = parts.join("\n");
    return { summary, warnings };
}
|
|
544
|
+
/**
 * Assemble the agent prompt for a single task as one newline-joined string.
 *
 * Includes the task key/title, description, epic/story context, acceptance
 * criteria, dependency summary, allowed file scope, gathered doc context,
 * and fixed instructions about path verification and patch output format.
 *
 * @param task selected task wrapper (task fields plus dependencies.keys).
 * @param docSummary output of gatherDocContext; falsy falls back to a
 *     generic docdex hint.
 * @param fileScope allowed file paths; empty means unconstrained.
 * @returns {string} the full prompt text.
 */
buildPrompt(task, docSummary, fileScope) {
    // Dependency line: list open dependency keys, or note there are none.
    const deps = task.dependencies.keys.length ? `Depends on: ${task.dependencies.keys.join(", ")}` : "No open dependencies.";
    const acceptance = (task.task.acceptanceCriteria ?? []).join("; ");
    // Fall back to generic docdex guidance when no summary was gathered.
    const docdexHint = docSummary ||
        "Use docdex: search workspace docs with project key and fetch linked documents when present (doc_links metadata).";
    return [
        `Task ${task.task.key}: ${task.task.title}`,
        `Description: ${task.task.description ?? "(none)"}`,
        `Epic: ${task.task.epicKey} (${task.task.epicTitle ?? "n/a"}), Story: ${task.task.storyKey} (${task.task.storyTitle ?? "n/a"})`,
        `Acceptance: ${acceptance || "Refer to SDS/OpenAPI for expected behavior."}`,
        deps,
        `Allowed files: ${fileScope.length ? fileScope.join(", ") : "(not constrained)"}`,
        `Doc context:\n${docdexHint}`,
        "Verify target paths against the current workspace (use docdex/file hints); do not assume hashed or generated asset names exist. If a path is missing, emit a new-file diff with full content (and parent dirs) instead of editing a non-existent file so git apply succeeds. Use JSON.parse-friendly unified diffs.",
        "Produce a concise plan and a patch in unified diff fenced with ```patch```.",
    ].join("\n");
}
|
|
561
|
+
/**
 * Switch the workspace repo to `baseBranch`, auto-committing stray changes
 * first.
 *
 * Dirty paths outside the .mcoda directory are staged and committed with a
 * generic "[mcoda] auto-commit" message; .mcoda bookkeeping files are left
 * dirty on purpose. If non-.mcoda paths are still dirty after that attempt,
 * the checkout is aborted with an error rather than risking lost work.
 *
 * @throws Error when the working tree cannot be made clean.
 */
async checkoutBaseBranch(baseBranch) {
    await this.vcs.ensureRepo(this.workspace.workspaceRoot);
    await this.vcs.ensureBaseBranch(this.workspace.workspaceRoot, baseBranch);
    const dirtyBefore = await this.vcs.dirtyPaths(this.workspace.workspaceRoot);
    // Internal .mcoda state files are allowed to stay dirty.
    const nonMcodaBefore = dirtyBefore.filter((p) => !p.startsWith(".mcoda"));
    if (nonMcodaBefore.length) {
        await this.vcs.stage(this.workspace.workspaceRoot, nonMcodaBefore);
        const status = await this.vcs.status(this.workspace.workspaceRoot);
        // Only commit when staging actually produced something to record.
        if (status.trim().length) {
            await this.vcs.commit(this.workspace.workspaceRoot, "[mcoda] auto-commit workspace changes");
        }
    }
    // Re-check: anything still dirty outside .mcoda blocks the checkout.
    const dirtyAfter = await this.vcs.dirtyPaths(this.workspace.workspaceRoot);
    const nonMcodaAfter = dirtyAfter.filter((p) => !p.startsWith(".mcoda"));
    if (nonMcodaAfter.length) {
        throw new Error(`Working tree dirty: ${nonMcodaAfter.join(", ")}`);
    }
    await this.vcs.checkoutBranch(this.workspace.workspaceRoot, baseBranch);
}
|
|
580
|
+
/**
 * Stage and commit any non-.mcoda working-tree changes on behalf of a task.
 *
 * No-ops when nothing outside .mcoda is dirty or when staging yields an
 * empty status. On commit, persists the new HEAD sha plus branch/base on
 * the task row and writes a "vcs" entry to the task-run log. The commit
 * message has the form "[<taskKey>] <taskTitle> (<reason>)".
 */
async commitPendingChanges(branchInfo, taskKey, taskTitle, reason, taskId, taskRunId) {
    const dirty = await this.vcs.dirtyPaths(this.workspace.workspaceRoot);
    // .mcoda bookkeeping files are never auto-committed.
    const nonMcoda = dirty.filter((p) => !p.startsWith(".mcoda"));
    if (!nonMcoda.length)
        return;
    await this.vcs.stage(this.workspace.workspaceRoot, nonMcoda);
    const status = await this.vcs.status(this.workspace.workspaceRoot);
    // Staging can be a no-op; skip creating an empty commit.
    if (!status.trim().length)
        return;
    const message = `[${taskKey}] ${taskTitle} (${reason})`;
    await this.vcs.commit(this.workspace.workspaceRoot, message);
    const head = await this.vcs.lastCommitSha(this.workspace.workspaceRoot);
    // Persist the VCS position on the task so later runs can resume from it.
    await this.deps.workspaceRepo.updateTask(taskId, {
        vcsLastCommitSha: head,
        vcsBranch: branchInfo?.branch ?? null,
        vcsBaseBranch: branchInfo?.base ?? null,
    });
    await this.logTask(taskRunId, `Auto-committed pending changes (${reason})`, "vcs", {
        branch: branchInfo?.branch,
        base: branchInfo?.base,
        head,
    });
}
|
|
603
|
+
/**
 * Prepare the per-task branch `<DEFAULT_TASK_BRANCH_PREFIX><taskKey>` on
 * top of `baseBranch` and leave it checked out.
 *
 * Flow: checkout+clean the base branch, best-effort pull it from origin,
 * then either reuse an existing task branch (refusing if it has
 * uncommitted non-.mcoda changes, best-effort pulling it, and merging the
 * base into it) or create a fresh branch off the base.
 *
 * @returns {{branch: string, base: string, mergeConflicts?: string[],
 *     remoteSyncNote?: string}} `mergeConflicts` is set (and the merge
 *     left unresolved) when merging base into the task branch conflicts;
 *     `remoteSyncNote` is set when the remote task branch has diverged.
 * @throws Error when the task branch is dirty or a non-conflict merge
 *     failure occurs.
 */
async ensureBranches(taskKey, baseBranch, taskRunId) {
    const branch = `${DEFAULT_TASK_BRANCH_PREFIX}${taskKey}`;
    await this.checkoutBaseBranch(baseBranch);
    const hasRemote = await this.vcs.hasRemote(this.workspace.workspaceRoot);
    if (hasRemote) {
        try {
            await this.vcs.pull(this.workspace.workspaceRoot, "origin", baseBranch, true);
        }
        catch (error) {
            // Pull failures on the base are non-fatal; work continues locally.
            // NOTE(review): this catch logs raw error.message while the task-branch
            // pull below uses formatGitError — consider unifying.
            await this.logTask(taskRunId, `Warning: failed to pull ${baseBranch} from origin; continuing with local base.`, "vcs", {
                error: error.message,
            });
        }
    }
    const branchExists = await this.vcs.branchExists(this.workspace.workspaceRoot, branch);
    let remoteSyncNote = "";
    if (branchExists) {
        await this.vcs.checkoutBranch(this.workspace.workspaceRoot, branch);
        // A dirty pre-existing task branch is a hard stop: merging/pulling
        // over uncommitted changes could destroy them.
        const dirty = (await this.vcs.dirtyPaths(this.workspace.workspaceRoot)).filter((p) => !p.startsWith(".mcoda"));
        if (dirty.length) {
            throw new Error(`Task branch ${branch} has uncommitted changes: ${dirty.join(", ")}`);
        }
        if (hasRemote) {
            try {
                await this.vcs.pull(this.workspace.workspaceRoot, "origin", branch, true);
            }
            catch (error) {
                const errorText = this.formatGitError(error);
                await this.logTask(taskRunId, `Warning: failed to pull ${branch} from origin; continuing with local branch.`, "vcs", {
                    error: errorText,
                });
                // Divergence from origin is surfaced to the caller as a note
                // rather than aborting the run.
                if (this.isNonFastForwardPull(errorText)) {
                    remoteSyncNote = `Remote task branch ${branch} is ahead/diverged. Sync it with origin (pull/rebase or merge) and resolve conflicts before continuing task work.`;
                }
            }
        }
        try {
            // Bring the task branch up to date with the base branch.
            await this.vcs.merge(this.workspace.workspaceRoot, baseBranch, branch, true);
        }
        catch (error) {
            const conflicts = await this.vcs.conflictPaths(this.workspace.workspaceRoot);
            if (conflicts.length) {
                // Conflicts are reported to the caller, not thrown.
                await this.logTask(taskRunId, `Merge conflicts detected while merging ${baseBranch} into ${branch}.`, "vcs", {
                    conflicts,
                });
                return { branch, base: baseBranch, mergeConflicts: conflicts, remoteSyncNote };
            }
            throw new Error(`Failed to merge ${baseBranch} into ${branch}: ${error.message}`);
        }
    }
    else {
        await this.vcs.createOrCheckoutBranch(this.workspace.workspaceRoot, branch, baseBranch);
    }
    return { branch, base: baseBranch, remoteSyncNote: remoteSyncNote || undefined };
}
|
|
658
|
+
/**
 * Flatten a git/exec error into a single diagnostic string.
 *
 * Joins the error message with any captured stderr and stdout (in that
 * order), dropping empty pieces. Returns "" for a falsy error value.
 */
formatGitError(error) {
    if (!error)
        return "";
    const pieces = [
        error instanceof Error ? error.message : String(error),
        typeof error.stderr === "string" ? error.stderr : "",
        typeof error.stdout === "string" ? error.stdout : "",
    ];
    return pieces.filter(Boolean).join(" ");
}
|
|
666
|
+
isNonFastForwardPush(errorText) {
|
|
667
|
+
return /non-fast-forward|fetch first|rejected/i.test(errorText);
|
|
668
|
+
}
|
|
669
|
+
isNonFastForwardPull(errorText) {
|
|
670
|
+
return /not possible to fast-forward|divergent|non-fast-forward|rejected/i.test(errorText);
|
|
671
|
+
}
|
|
672
|
+
isRemotePermissionError(errorText) {
|
|
673
|
+
return /protected branch|gh006|permission denied|not authorized|not allowed to push|access denied|403|forbidden/i.test(errorText);
|
|
674
|
+
}
|
|
675
|
+
isCommitHookFailure(errorText) {
|
|
676
|
+
return /hook|pre-commit|commit-msg|husky/i.test(errorText);
|
|
677
|
+
}
|
|
678
|
+
isGpgSignFailure(errorText) {
|
|
679
|
+
return /gpg|signing key|signing failed|gpg failed|no secret key/i.test(errorText);
|
|
680
|
+
}
|
|
681
|
+
/**
 * Push `branch` to origin, recovering from non-fast-forward rejections by
 * pulling and retrying once.
 *
 * Permission / branch-protection rejections (first attempt or retry) are
 * logged and reported as a skip instead of throwing, so local commits are
 * kept. If the repo is currently on a different branch, it is checked out
 * to `branch` for the retry and restored afterwards (via finally).
 *
 * @returns {{pushed: boolean, skipped: boolean, reason?: string}}
 * @throws the original error for unrecognized push failures, or a new
 *     Error when the post-pull retry still fails.
 */
async pushWithRecovery(taskRunId, branch) {
    const cwd = this.workspace.workspaceRoot;
    try {
        await this.vcs.push(cwd, "origin", branch);
        return { pushed: true, skipped: false };
    }
    catch (error) {
        const errorText = this.formatGitError(error);
        if (this.isRemotePermissionError(errorText)) {
            // Not retryable: report a skip and keep the local commits.
            await this.logTask(taskRunId, `Remote rejected push for ${branch} due to permissions or branch protection; continuing with local commits.`, "vcs", {
                error: errorText,
                guidance: "Use a token with write access or push to an unprotected branch and open a PR.",
            });
            return { pushed: false, skipped: true, reason: "permission" };
        }
        // Only non-fast-forward rejections are worth a pull-and-retry.
        if (!this.isNonFastForwardPush(errorText)) {
            throw error;
        }
        await this.logTask(taskRunId, `Non-fast-forward push rejected for ${branch}; attempting to pull and retry.`, "vcs", { error: errorText });
        const currentBranch = await this.vcs.currentBranch(cwd);
        // Switch to the target branch for the retry if we are elsewhere.
        if (currentBranch && currentBranch !== branch) {
            await this.vcs.ensureClean(cwd);
            await this.vcs.checkoutBranch(cwd, branch);
        }
        try {
            await this.vcs.pull(cwd, "origin", branch, false);
            await this.vcs.push(cwd, "origin", branch);
        }
        catch (retryError) {
            const retryText = this.formatGitError(retryError);
            if (this.isRemotePermissionError(retryText)) {
                await this.logTask(taskRunId, `Remote rejected push for ${branch} after sync due to permissions or branch protection; continuing with local commits.`, "vcs", {
                    error: retryText,
                    guidance: "Use a token with write access or push to an unprotected branch and open a PR.",
                });
                return { pushed: false, skipped: true, reason: "permission" };
            }
            throw new Error(`Non-fast-forward push rejected for ${branch}; retry after pull failed: ${retryText}`);
        }
        finally {
            // Restore the originally checked-out branch on every retry outcome.
            if (currentBranch && currentBranch !== branch) {
                await this.vcs.checkoutBranch(cwd, currentBranch);
            }
        }
        await this.logTask(taskRunId, `Push recovered after syncing ${branch} from origin.`, "vcs");
        return { pushed: true, skipped: false };
    }
}
|
|
729
|
+
validateScope(allowed, touched) {
|
|
730
|
+
if (!allowed.length)
|
|
731
|
+
return { ok: true };
|
|
732
|
+
const normalizedAllowed = allowed.map((f) => f.replace(/\\/g, "/"));
|
|
733
|
+
const outOfScope = touched.filter((f) => !normalizedAllowed.some((allowedPath) => f === allowedPath || f.startsWith(`${allowedPath}/`)));
|
|
734
|
+
if (outOfScope.length) {
|
|
735
|
+
return { ok: false, message: `Patch touches files outside allowed scope: ${outOfScope.join(", ")}` };
|
|
736
|
+
}
|
|
737
|
+
return { ok: true };
|
|
738
|
+
}
|
|
739
|
+
/**
 * Sanitize and apply a list of unified-diff patches under `cwd`.
 *
 * Each raw patch runs through a normalization pipeline (apply-patch
 * conversion, diff headers, hunk headers, missing +/- prefixes, bogus index
 * lines) and is then split into per-file segments. Placeholder patches and
 * patches with no recognizable paths are skipped with warnings. Add-style
 * segments targeting files that already exist are converted to updates (or
 * skipped). In dry-run mode segments only count as touched. When git apply
 * fails on an add-only segment, the new files are written directly as a
 * fallback.
 *
 * @returns {{touched: string[], warnings: string[], error?: string}}
 *     touched file paths, accumulated warnings, and an error when nothing
 *     applied at all.
 */
async applyPatches(patches, cwd, dryRun) {
    const touched = new Set();
    const warnings = [];
    let applied = 0;
    for (const patch of patches) {
        // Normalization pipeline: each step repairs a common agent-output defect.
        const normalized = maybeConvertApplyPatch(patch);
        const withHeader = ensureDiffHeader(normalized);
        const withHunks = normalizeHunkHeaders(withHeader);
        const withPrefixes = fixMissingPrefixesInHunks(withHunks);
        const sanitized = stripInvalidIndexLines(withPrefixes);
        if (isPlaceholderPatch(sanitized)) {
            warnings.push("Skipped placeholder patch that contained ??? or 'rest of existing code'.");
            continue;
        }
        const files = touchedFilesFromPatch(sanitized);
        if (!files.length) {
            warnings.push("Skipped patch with no recognizable file paths.");
            continue;
        }
        // Apply per-file segments independently so one bad segment does not
        // sink the whole patch.
        const segments = splitPatchIntoDiffs(sanitized);
        for (const segment of segments) {
            const segmentFiles = touchedFilesFromPatch(segment);
            const existingFiles = new Set(segmentFiles.map((f) => path.join(cwd, f)).filter((f) => fs.existsSync(f)));
            let patchToApply = segment;
            if (existingFiles.size > 0) {
                // Add-diffs against files that already exist must be rewritten
                // as updates (or skipped when that is not possible).
                const { patch: converted, skipped } = updateAddPatchForExistingFile(segment, existingFiles, cwd);
                patchToApply = converted;
                if (skipped.length) {
                    warnings.push(`Skipped add patch for existing files: ${skipped.join(", ")}`);
                    continue;
                }
            }
            if (dryRun) {
                segmentFiles.forEach((f) => touched.add(f));
                applied += 1;
                continue;
            }
            // Ensure target directories exist for new/updated files.
            for (const file of segmentFiles) {
                const dir = path.dirname(path.join(cwd, file));
                try {
                    await fs.promises.mkdir(dir, { recursive: true });
                }
                catch {
                    /* ignore mkdir errors; git apply will surface issues */
                }
            }
            try {
                await this.vcs.applyPatch(cwd, patchToApply);
                segmentFiles.forEach((f) => touched.add(f));
                applied += 1;
            }
            catch (error) {
                // Fallback: if the segment only adds new files and git apply fails, write the files directly.
                const additions = parseAddedFileContents(patchToApply);
                const addTargets = Object.keys(additions);
                if (addTargets.length && segmentFiles.length === addTargets.length) {
                    try {
                        for (const file of addTargets) {
                            const dest = path.join(cwd, file);
                            await fs.promises.mkdir(path.dirname(dest), { recursive: true });
                            await fs.promises.writeFile(dest, additions[file], "utf8");
                            touched.add(file);
                        }
                        applied += 1;
                        warnings.push(`Applied add-only segment by writing files directly: ${addTargets.join(", ")}`);
                        continue;
                    }
                    catch (writeError) {
                        warnings.push(`Patch segment failed and fallback write failed (${segmentFiles.join(", ") || "unknown files"}): ${writeError.message}`);
                        continue;
                    }
                }
                warnings.push(`Patch segment failed (${segmentFiles.join(", ") || "unknown files"}): ${error.message}`);
            }
        }
    }
    if (!applied && warnings.length) {
        // NOTE(review): this error text claims everything was a placeholder,
        // but failed git-apply segments also land here — wording may mislead.
        return { touched: Array.from(touched), warnings, error: "No patches applied; all were skipped as placeholders." };
    }
    return { touched: Array.from(touched), warnings };
}
|
|
821
|
+
async applyFileBlocks(files, cwd, dryRun, allowNoop = false) {
|
|
822
|
+
const touched = new Set();
|
|
823
|
+
const warnings = [];
|
|
824
|
+
let applied = 0;
|
|
825
|
+
for (const file of files) {
|
|
826
|
+
const relative = file.path.trim();
|
|
827
|
+
if (!relative) {
|
|
828
|
+
warnings.push("Skipped file block with empty path.");
|
|
829
|
+
continue;
|
|
830
|
+
}
|
|
831
|
+
const resolved = path.resolve(cwd, relative);
|
|
832
|
+
const relativePath = path.relative(cwd, resolved).replace(/\\/g, "/");
|
|
833
|
+
if (relativePath.startsWith("..") || path.isAbsolute(relativePath)) {
|
|
834
|
+
warnings.push(`Skipped file block outside workspace: ${relative}`);
|
|
835
|
+
continue;
|
|
836
|
+
}
|
|
837
|
+
if (fs.existsSync(resolved)) {
|
|
838
|
+
warnings.push(`Skipped file block for existing file: ${relativePath}`);
|
|
839
|
+
continue;
|
|
840
|
+
}
|
|
841
|
+
if (dryRun) {
|
|
842
|
+
touched.add(relativePath);
|
|
843
|
+
applied += 1;
|
|
844
|
+
continue;
|
|
845
|
+
}
|
|
846
|
+
try {
|
|
847
|
+
await fs.promises.mkdir(path.dirname(resolved), { recursive: true });
|
|
848
|
+
await fs.promises.writeFile(resolved, file.content ?? "", "utf8");
|
|
849
|
+
touched.add(relativePath);
|
|
850
|
+
applied += 1;
|
|
851
|
+
}
|
|
852
|
+
catch (error) {
|
|
853
|
+
warnings.push(`Failed to write file block ${relativePath}: ${error.message}`);
|
|
854
|
+
}
|
|
855
|
+
}
|
|
856
|
+
if (!applied && !allowNoop) {
|
|
857
|
+
return {
|
|
858
|
+
touched: Array.from(touched),
|
|
859
|
+
warnings,
|
|
860
|
+
error: "No file blocks were applied.",
|
|
861
|
+
appliedCount: applied,
|
|
862
|
+
};
|
|
863
|
+
}
|
|
864
|
+
return { touched: Array.from(touched), warnings, appliedCount: applied };
|
|
865
|
+
}
|
|
866
|
+
async runTests(commands, cwd) {
|
|
867
|
+
const results = [];
|
|
868
|
+
for (const command of commands) {
|
|
869
|
+
try {
|
|
870
|
+
const { stdout, stderr } = await exec(command, { cwd });
|
|
871
|
+
results.push({ command, stdout, stderr, code: 0 });
|
|
872
|
+
}
|
|
873
|
+
catch (error) {
|
|
874
|
+
results.push({
|
|
875
|
+
command,
|
|
876
|
+
stdout: error.stdout ?? "",
|
|
877
|
+
stderr: error.stderr ?? String(error),
|
|
878
|
+
code: typeof error.code === "number" ? error.code : 1,
|
|
879
|
+
});
|
|
880
|
+
return { ok: false, results };
|
|
881
|
+
}
|
|
882
|
+
}
|
|
883
|
+
return { ok: true, results };
|
|
884
|
+
}
|
|
885
|
+
async workOnTasks(request) {
|
|
886
|
+
await this.ensureMcoda();
|
|
887
|
+
const agentStream = request.agentStream !== false;
|
|
888
|
+
const configuredBaseBranch = request.baseBranch ?? this.workspace.config?.branch;
|
|
889
|
+
const baseBranch = DEFAULT_BASE_BRANCH;
|
|
890
|
+
const baseBranchWarnings = configuredBaseBranch && configuredBaseBranch !== baseBranch
|
|
891
|
+
? [`Base branch forced to ${baseBranch}; ignoring configured ${configuredBaseBranch}.`]
|
|
892
|
+
: [];
|
|
893
|
+
const commandRun = await this.deps.jobService.startCommandRun("work-on-tasks", request.projectKey, {
|
|
894
|
+
taskIds: request.taskKeys,
|
|
895
|
+
});
|
|
896
|
+
const job = await this.deps.jobService.startJob("work", commandRun.id, request.projectKey, {
|
|
897
|
+
commandName: "work-on-tasks",
|
|
898
|
+
payload: {
|
|
899
|
+
projectKey: request.projectKey,
|
|
900
|
+
epicKey: request.epicKey,
|
|
901
|
+
storyKey: request.storyKey,
|
|
902
|
+
tasks: request.taskKeys,
|
|
903
|
+
statusFilter: request.statusFilter,
|
|
904
|
+
limit: request.limit,
|
|
905
|
+
parallel: request.parallel,
|
|
906
|
+
noCommit: request.noCommit ?? false,
|
|
907
|
+
dryRun: request.dryRun ?? false,
|
|
908
|
+
agent: request.agentName,
|
|
909
|
+
agentStream,
|
|
910
|
+
},
|
|
911
|
+
});
|
|
912
|
+
let selection;
|
|
913
|
+
let storyPointsProcessed = 0;
|
|
914
|
+
try {
|
|
915
|
+
await this.checkoutBaseBranch(baseBranch);
|
|
916
|
+
selection = await this.selectionService.selectTasks({
|
|
917
|
+
projectKey: request.projectKey,
|
|
918
|
+
epicKey: request.epicKey,
|
|
919
|
+
storyKey: request.storyKey,
|
|
920
|
+
taskKeys: request.taskKeys,
|
|
921
|
+
statusFilter: request.statusFilter,
|
|
922
|
+
limit: request.limit,
|
|
923
|
+
parallel: request.parallel,
|
|
924
|
+
});
|
|
925
|
+
await this.checkpoint(job.id, "selection", {
|
|
926
|
+
ordered: selection.ordered.map((t) => t.task.key),
|
|
927
|
+
blocked: selection.blocked.map((t) => t.task.key),
|
|
928
|
+
});
|
|
929
|
+
await this.deps.jobService.updateJobStatus(job.id, "running", {
|
|
930
|
+
payload: {
|
|
931
|
+
...(job.payload ?? {}),
|
|
932
|
+
selection: selection.ordered.map((t) => t.task.key),
|
|
933
|
+
blocked: selection.blocked.map((t) => t.task.key),
|
|
934
|
+
},
|
|
935
|
+
totalItems: selection.ordered.length,
|
|
936
|
+
processedItems: 0,
|
|
937
|
+
});
|
|
938
|
+
const results = [];
|
|
939
|
+
const warnings = [...baseBranchWarnings, ...selection.warnings];
|
|
940
|
+
const agent = await this.resolveAgent(request.agentName);
|
|
941
|
+
const prompts = await this.loadPrompts(agent.id);
|
|
942
|
+
const formatSessionId = (iso) => {
|
|
943
|
+
const date = new Date(iso);
|
|
944
|
+
const pad = (value) => String(value).padStart(2, "0");
|
|
945
|
+
return `${date.getFullYear()}${pad(date.getMonth() + 1)}${pad(date.getDate())}_${pad(date.getHours())}${pad(date.getMinutes())}${pad(date.getSeconds())}`;
|
|
946
|
+
};
|
|
947
|
+
const formatDuration = (ms) => {
|
|
948
|
+
const totalSeconds = Math.max(0, Math.floor(ms / 1000));
|
|
949
|
+
const seconds = totalSeconds % 60;
|
|
950
|
+
const minutesTotal = Math.floor(totalSeconds / 60);
|
|
951
|
+
const minutes = minutesTotal % 60;
|
|
952
|
+
const hours = Math.floor(minutesTotal / 60);
|
|
953
|
+
if (hours > 0)
|
|
954
|
+
return `${hours}H ${minutes}M ${seconds}S`;
|
|
955
|
+
return `${minutes}M ${seconds}S`;
|
|
956
|
+
};
|
|
957
|
+
const formatCount = (value) => value.toLocaleString("en-US");
|
|
958
|
+
const emitLine = (line) => {
|
|
959
|
+
if (request.onAgentChunk) {
|
|
960
|
+
request.onAgentChunk(`${line}\n`);
|
|
961
|
+
return;
|
|
962
|
+
}
|
|
963
|
+
console.info(line);
|
|
964
|
+
};
|
|
965
|
+
const emitBlank = () => emitLine("");
|
|
966
|
+
const resolveProvider = (adapter) => {
|
|
967
|
+
if (!adapter)
|
|
968
|
+
return "n/a";
|
|
969
|
+
const trimmed = adapter.trim();
|
|
970
|
+
if (!trimmed)
|
|
971
|
+
return "n/a";
|
|
972
|
+
if (trimmed.includes("-"))
|
|
973
|
+
return trimmed.split("-")[0];
|
|
974
|
+
return trimmed;
|
|
975
|
+
};
|
|
976
|
+
const resolveReasoning = (config) => {
|
|
977
|
+
if (!config)
|
|
978
|
+
return "n/a";
|
|
979
|
+
const raw = config.reasoning ?? config.thinking;
|
|
980
|
+
if (typeof raw === "string")
|
|
981
|
+
return raw;
|
|
982
|
+
if (typeof raw === "boolean")
|
|
983
|
+
return raw ? "enabled" : "disabled";
|
|
984
|
+
return "n/a";
|
|
985
|
+
};
|
|
986
|
+
const emitTaskStart = (details) => {
|
|
987
|
+
emitLine("╭──────────────────────────────────────────────────────────╮");
|
|
988
|
+
emitLine("│ START OF TASK │");
|
|
989
|
+
emitLine("╰──────────────────────────────────────────────────────────╯");
|
|
990
|
+
emitLine(` [🪪] Start Task ID: ${details.taskKey}`);
|
|
991
|
+
emitLine(` [👹] Alias: ${details.alias}`);
|
|
992
|
+
emitLine(` [ℹ️] Summary: ${details.summary}`);
|
|
993
|
+
emitLine(` [🤖] Model: ${details.model}`);
|
|
994
|
+
emitLine(` [🕹️] Provider: ${details.provider}`);
|
|
995
|
+
emitLine(` [🧩] Step: ${details.step}`);
|
|
996
|
+
emitLine(` [🧠] Reasoning: ${details.reasoning}`);
|
|
997
|
+
emitLine(` [📁] Workdir: ${details.workdir}`);
|
|
998
|
+
emitLine(` [🔑] Session: ${details.sessionId}`);
|
|
999
|
+
emitBlank();
|
|
1000
|
+
emitLine(" ░░░░░ START OF A NEW TASK ░░░░░");
|
|
1001
|
+
emitBlank();
|
|
1002
|
+
emitLine(` [STEP ${details.step}] [MODEL ${details.model}]`);
|
|
1003
|
+
emitBlank();
|
|
1004
|
+
emitBlank();
|
|
1005
|
+
};
|
|
1006
|
+
const emitTaskEnd = async (details) => {
|
|
1007
|
+
const tokensTotal = details.tokensPrompt + details.tokensCompletion;
|
|
1008
|
+
const promptEstimate = Math.max(1, details.promptEstimate);
|
|
1009
|
+
const usagePercent = (tokensTotal / promptEstimate) * 100;
|
|
1010
|
+
const completion = details.status === "succeeded" ? 100 : 0;
|
|
1011
|
+
const completionBar = "💰".repeat(15);
|
|
1012
|
+
const statusLabel = details.status === "succeeded"
|
|
1013
|
+
? "COMPLETED"
|
|
1014
|
+
: details.status === "skipped"
|
|
1015
|
+
? "SKIPPED"
|
|
1016
|
+
: details.status === "blocked"
|
|
1017
|
+
? "BLOCKED"
|
|
1018
|
+
: "FAILED";
|
|
1019
|
+
const hasRemote = await this.vcs.hasRemote(this.workspace.workspaceRoot);
|
|
1020
|
+
const tracking = details.taskBranch ? (hasRemote ? `origin/${details.taskBranch}` : "n/a") : "n/a";
|
|
1021
|
+
const headSha = details.headSha ?? "n/a";
|
|
1022
|
+
const baseLabel = details.baseBranch ?? baseBranch;
|
|
1023
|
+
emitLine("╭──────────────────────────────────────────────────────────╮");
|
|
1024
|
+
emitLine("│ END OF TASK │");
|
|
1025
|
+
emitLine("╰──────────────────────────────────────────────────────────╯");
|
|
1026
|
+
emitLine(` 👏🏼 TASK ${details.taskKey} | 📜 STATUS ${statusLabel} | 🏠 TERMINAL ${details.terminal} | ⚡ SP ${details.storyPoints ?? 0} | ⌛ TIME ${formatDuration(details.elapsedMs)}`);
|
|
1027
|
+
emitBlank();
|
|
1028
|
+
emitLine(` [${completionBar}] ${completion.toFixed(1)}% Complete`);
|
|
1029
|
+
emitLine(` Tokens used: ${formatCount(tokensTotal)}`);
|
|
1030
|
+
emitLine(` ${usagePercent.toFixed(1)}% used vs prompt est (x${(tokensTotal / promptEstimate).toFixed(2)})`);
|
|
1031
|
+
emitLine(` Est. tokens: ${formatCount(promptEstimate)}`);
|
|
1032
|
+
emitBlank();
|
|
1033
|
+
emitLine("🌿 Git summary");
|
|
1034
|
+
emitLine("────────────────────────────────────────────────────────────");
|
|
1035
|
+
emitLine(` [🎋] Task branch: ${details.taskBranch ?? "n/a"}`);
|
|
1036
|
+
emitLine(` [🗿] Tracking: ${tracking}`);
|
|
1037
|
+
emitLine(` [🚀] Merge→dev: ${details.mergeStatus}`);
|
|
1038
|
+
emitLine(` [🐲] HEAD: ${headSha}`);
|
|
1039
|
+
emitLine(` [♨️] Files: ${details.touchedFiles}`);
|
|
1040
|
+
emitLine(` [🔑] Base: ${baseLabel}`);
|
|
1041
|
+
emitLine(" [🧾] Git log: n/a");
|
|
1042
|
+
emitBlank();
|
|
1043
|
+
emitLine("🗂 Artifacts");
|
|
1044
|
+
emitLine("────────────────────────────────────────────────────────────");
|
|
1045
|
+
emitLine(" • History: n/a");
|
|
1046
|
+
emitLine(" • Git log: n/a");
|
|
1047
|
+
emitBlank();
|
|
1048
|
+
emitLine(" ░░░░░ END OF THE TASK WORK ░░░░░");
|
|
1049
|
+
emitBlank();
|
|
1050
|
+
};
|
|
1051
|
+
for (const [index, task] of selection.ordered.entries()) {
|
|
1052
|
+
const startedAt = new Date().toISOString();
|
|
1053
|
+
const taskRun = await this.deps.workspaceRepo.createTaskRun({
|
|
1054
|
+
taskId: task.task.id,
|
|
1055
|
+
command: "work-on-tasks",
|
|
1056
|
+
jobId: job.id,
|
|
1057
|
+
commandRunId: commandRun.id,
|
|
1058
|
+
agentId: agent.id,
|
|
1059
|
+
status: "running",
|
|
1060
|
+
startedAt,
|
|
1061
|
+
storyPointsAtRun: task.task.storyPoints ?? null,
|
|
1062
|
+
gitBranch: task.task.vcsBranch ?? null,
|
|
1063
|
+
gitBaseBranch: task.task.vcsBaseBranch ?? null,
|
|
1064
|
+
gitCommitSha: task.task.vcsLastCommitSha ?? null,
|
|
1065
|
+
});
|
|
1066
|
+
const sessionId = formatSessionId(startedAt);
|
|
1067
|
+
const taskAlias = `Working on task ${task.task.key}`;
|
|
1068
|
+
const taskSummary = task.task.title || task.task.description || "(none)";
|
|
1069
|
+
const modelLabel = agent.defaultModel ?? "(default)";
|
|
1070
|
+
const providerLabel = resolveProvider(agent.adapter);
|
|
1071
|
+
const reasoningLabel = resolveReasoning(agent.config);
|
|
1072
|
+
const stepLabel = "patch";
|
|
1073
|
+
const taskStartMs = Date.now();
|
|
1074
|
+
let taskStatus = null;
|
|
1075
|
+
let tokensPromptTotal = 0;
|
|
1076
|
+
let tokensCompletionTotal = 0;
|
|
1077
|
+
let promptEstimateBase = 0;
|
|
1078
|
+
let promptEstimateTotal = 0;
|
|
1079
|
+
let mergeStatus = "skipped";
|
|
1080
|
+
let patchApplied = false;
|
|
1081
|
+
let touched = [];
|
|
1082
|
+
let taskBranchName = task.task.vcsBranch ?? null;
|
|
1083
|
+
let baseBranchName = task.task.vcsBaseBranch ?? baseBranch;
|
|
1084
|
+
let branchInfo = {
|
|
1085
|
+
branch: task.task.vcsBranch ?? "",
|
|
1086
|
+
base: task.task.vcsBaseBranch ?? baseBranch,
|
|
1087
|
+
};
|
|
1088
|
+
let headSha = task.task.vcsLastCommitSha ?? null;
|
|
1089
|
+
let taskEndEmitted = false;
|
|
1090
|
+
emitTaskStart({
|
|
1091
|
+
taskKey: task.task.key,
|
|
1092
|
+
alias: taskAlias,
|
|
1093
|
+
summary: taskSummary,
|
|
1094
|
+
model: modelLabel,
|
|
1095
|
+
provider: providerLabel,
|
|
1096
|
+
step: stepLabel,
|
|
1097
|
+
reasoning: reasoningLabel,
|
|
1098
|
+
workdir: this.workspace.workspaceRoot,
|
|
1099
|
+
sessionId,
|
|
1100
|
+
});
|
|
1101
|
+
const emitTaskEndOnce = async () => {
|
|
1102
|
+
if (taskEndEmitted)
|
|
1103
|
+
return;
|
|
1104
|
+
taskEndEmitted = true;
|
|
1105
|
+
const status = taskStatus ?? "failed";
|
|
1106
|
+
const terminal = status === "succeeded"
|
|
1107
|
+
? touched.length
|
|
1108
|
+
? "COMPLETED_WITH_CHANGES"
|
|
1109
|
+
: "COMPLETED_NO_CHANGES"
|
|
1110
|
+
: status === "blocked"
|
|
1111
|
+
? "BLOCKED"
|
|
1112
|
+
: status === "skipped"
|
|
1113
|
+
? "SKIPPED"
|
|
1114
|
+
: "FAILED";
|
|
1115
|
+
let resolvedHead = headSha;
|
|
1116
|
+
if (!resolvedHead) {
|
|
1117
|
+
try {
|
|
1118
|
+
resolvedHead = await this.vcs.lastCommitSha(this.workspace.workspaceRoot);
|
|
1119
|
+
}
|
|
1120
|
+
catch {
|
|
1121
|
+
resolvedHead = null;
|
|
1122
|
+
}
|
|
1123
|
+
}
|
|
1124
|
+
await emitTaskEnd({
|
|
1125
|
+
taskKey: task.task.key,
|
|
1126
|
+
status,
|
|
1127
|
+
terminal,
|
|
1128
|
+
storyPoints: task.task.storyPoints ?? 0,
|
|
1129
|
+
elapsedMs: Date.now() - taskStartMs,
|
|
1130
|
+
tokensPrompt: tokensPromptTotal,
|
|
1131
|
+
tokensCompletion: tokensCompletionTotal,
|
|
1132
|
+
promptEstimate: promptEstimateTotal || promptEstimateBase,
|
|
1133
|
+
taskBranch: taskBranchName || null,
|
|
1134
|
+
baseBranch: baseBranchName || baseBranch,
|
|
1135
|
+
touchedFiles: touched.length,
|
|
1136
|
+
mergeStatus,
|
|
1137
|
+
headSha: resolvedHead,
|
|
1138
|
+
});
|
|
1139
|
+
};
|
|
1140
|
+
const phaseTimers = {};
|
|
1141
|
+
const startPhase = async (phase, details) => {
|
|
1142
|
+
phaseTimers[phase] = Date.now();
|
|
1143
|
+
await this.updateTaskPhase(job.id, taskRun.id, task.task.key, phase, "start", details);
|
|
1144
|
+
};
|
|
1145
|
+
const endPhase = async (phase, details) => {
|
|
1146
|
+
const started = phaseTimers[phase];
|
|
1147
|
+
const durationSeconds = started ? Math.round(((Date.now() - started) / 1000) * 1000) / 1000 : undefined;
|
|
1148
|
+
await this.updateTaskPhase(job.id, taskRun.id, task.task.key, phase, "end", {
|
|
1149
|
+
...(details ?? {}),
|
|
1150
|
+
durationSeconds,
|
|
1151
|
+
});
|
|
1152
|
+
};
|
|
1153
|
+
try {
|
|
1154
|
+
await startPhase("selection", {
|
|
1155
|
+
dependencies: task.dependencies.keys,
|
|
1156
|
+
blockedReason: task.blockedReason,
|
|
1157
|
+
});
|
|
1158
|
+
await this.logTask(taskRun.id, `Selected task ${task.task.key}`, "selection", {
|
|
1159
|
+
dependencies: task.dependencies.keys,
|
|
1160
|
+
blockedReason: task.blockedReason,
|
|
1161
|
+
});
|
|
1162
|
+
if (task.blockedReason && !request.dryRun) {
|
|
1163
|
+
await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "selection", "error", {
|
|
1164
|
+
blockedReason: task.blockedReason,
|
|
1165
|
+
});
|
|
1166
|
+
await this.stateService.markBlocked(task.task, task.blockedReason);
|
|
1167
|
+
await this.deps.workspaceRepo.updateTaskRun(taskRun.id, {
|
|
1168
|
+
status: "failed",
|
|
1169
|
+
finishedAt: new Date().toISOString(),
|
|
1170
|
+
});
|
|
1171
|
+
results.push({ taskKey: task.task.key, status: "blocked", notes: task.blockedReason });
|
|
1172
|
+
taskStatus = "blocked";
|
|
1173
|
+
await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
|
|
1174
|
+
await emitTaskEndOnce();
|
|
1175
|
+
continue;
|
|
1176
|
+
}
|
|
1177
|
+
await endPhase("selection");
|
|
1178
|
+
}
|
|
1179
|
+
catch (error) {
|
|
1180
|
+
const message = `Selection phase failed: ${error.message}`;
|
|
1181
|
+
try {
|
|
1182
|
+
await this.logTask(taskRun.id, message, "selection");
|
|
1183
|
+
}
|
|
1184
|
+
catch {
|
|
1185
|
+
/* ignore log failures */
|
|
1186
|
+
}
|
|
1187
|
+
await this.deps.workspaceRepo.updateTaskRun(taskRun.id, {
|
|
1188
|
+
status: "failed",
|
|
1189
|
+
finishedAt: new Date().toISOString(),
|
|
1190
|
+
});
|
|
1191
|
+
results.push({ taskKey: task.task.key, status: "failed", notes: message });
|
|
1192
|
+
taskStatus = "failed";
|
|
1193
|
+
await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
|
|
1194
|
+
await emitTaskEndOnce();
|
|
1195
|
+
continue;
|
|
1196
|
+
}
|
|
1197
|
+
let lockAcquired = false;
|
|
1198
|
+
if (!request.dryRun) {
|
|
1199
|
+
const ttlSeconds = Math.max(1, TASK_LOCK_TTL_SECONDS);
|
|
1200
|
+
const lockResult = await this.deps.workspaceRepo.tryAcquireTaskLock(task.task.id, taskRun.id, job.id, ttlSeconds);
|
|
1201
|
+
if (!lockResult.acquired) {
|
|
1202
|
+
await this.logTask(taskRun.id, "Task already locked by another run; skipping.", "vcs", {
|
|
1203
|
+
lock: lockResult.lock ?? null,
|
|
1204
|
+
});
|
|
1205
|
+
await this.deps.workspaceRepo.updateTaskRun(taskRun.id, {
|
|
1206
|
+
status: "cancelled",
|
|
1207
|
+
finishedAt: new Date().toISOString(),
|
|
1208
|
+
});
|
|
1209
|
+
results.push({ taskKey: task.task.key, status: "skipped", notes: "task_locked" });
|
|
1210
|
+
taskStatus = "skipped";
|
|
1211
|
+
await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
|
|
1212
|
+
await emitTaskEndOnce();
|
|
1213
|
+
continue;
|
|
1214
|
+
}
|
|
1215
|
+
lockAcquired = true;
|
|
1216
|
+
}
|
|
1217
|
+
try {
|
|
1218
|
+
const metadata = task.task.metadata ?? {};
|
|
1219
|
+
let allowedFiles = Array.isArray(metadata.files) ? normalizePaths(this.workspace.workspaceRoot, metadata.files) : [];
|
|
1220
|
+
const testCommands = Array.isArray(metadata.tests) ? metadata.tests : [];
|
|
1221
|
+
let mergeConflicts = [];
|
|
1222
|
+
let remoteSyncNote = "";
|
|
1223
|
+
const softFailures = [];
|
|
1224
|
+
let lastLockRefresh = Date.now();
|
|
1225
|
+
// How often to heartbeat the task lock: one third of the lock TTL, clamped
// so we never poll faster than every 250ms nor wait until within 250ms of
// the lock's expiry.
const getLockRefreshIntervalMs = () => {
    const ttlMs = Math.max(1, TASK_LOCK_TTL_SECONDS) * 1000;
    const oneThird = Math.floor(ttlMs / 3);
    const latestSafe = ttlMs - 250;
    return Math.max(250, Math.min(latestSafe, oneThird));
};
|
|
1230
|
+
/**
 * Best-effort heartbeat for the per-task lock.
 *
 * Skips the repo round-trip when no lock was acquired (dry-run path) or when
 * called again before the refresh interval has elapsed, unless `force` is set.
 * Any failure — the lock being stolen by another run, or the refresh call
 * itself throwing — is logged and reported as `false` so callers can abort.
 *
 * @param {string} label - phase name used in log messages for diagnostics.
 * @param {boolean} [force=false] - bypass the refresh-interval throttle.
 * @returns {Promise<boolean>} true while the lock is still held by this run.
 */
const refreshLock = async (label, force = false) => {
    if (!lockAcquired)
        return true;
    const now = Date.now();
    // Throttle: avoid hammering the repo between heartbeat intervals.
    if (!force && now - lastLockRefresh < getLockRefreshIntervalMs())
        return true;
    try {
        const ttlSeconds = Math.max(1, TASK_LOCK_TTL_SECONDS);
        const refreshed = await this.deps.workspaceRepo.refreshTaskLock(task.task.id, taskRun.id, ttlSeconds);
        if (!refreshed) {
            await this.logTask(taskRun.id, `Task lock lost during ${label}; another run may have taken it.`, "vcs", {
                reason: "lock_stolen",
            });
        }
        else {
            // Only record a successful heartbeat so a stolen lock keeps
            // failing fast on subsequent calls.
            lastLockRefresh = now;
        }
        return refreshed;
    }
    catch (error) {
        // Treat an errored refresh the same as losing the lock: safer to
        // abort than to keep writing under a possibly-expired lock.
        await this.logTask(taskRun.id, `Failed to refresh task lock (${label}); treating as lock loss.`, "vcs", {
            error: error.message,
            reason: "refresh_failed",
        });
        return false;
    }
};
|
|
1258
|
+
if (!request.dryRun) {
|
|
1259
|
+
try {
|
|
1260
|
+
branchInfo = await this.ensureBranches(task.task.key, baseBranch, taskRun.id);
|
|
1261
|
+
taskBranchName = branchInfo.branch || taskBranchName;
|
|
1262
|
+
baseBranchName = branchInfo.base || baseBranchName;
|
|
1263
|
+
mergeConflicts = branchInfo.mergeConflicts ?? [];
|
|
1264
|
+
remoteSyncNote = branchInfo.remoteSyncNote ?? "";
|
|
1265
|
+
if (mergeConflicts.length && allowedFiles.length) {
|
|
1266
|
+
allowedFiles = Array.from(new Set([...allowedFiles, ...mergeConflicts.map((f) => f.replace(/\\/g, "/"))]));
|
|
1267
|
+
}
|
|
1268
|
+
await this.deps.workspaceRepo.updateTask(task.task.id, {
|
|
1269
|
+
vcsBranch: branchInfo.branch,
|
|
1270
|
+
vcsBaseBranch: branchInfo.base,
|
|
1271
|
+
});
|
|
1272
|
+
await this.logTask(taskRun.id, `Using branch ${branchInfo.branch} (base ${branchInfo.base})`, "vcs");
|
|
1273
|
+
}
|
|
1274
|
+
catch (error) {
|
|
1275
|
+
const message = `Failed to prepare branches: ${error.message}`;
|
|
1276
|
+
await this.logTask(taskRun.id, message, "vcs");
|
|
1277
|
+
await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
|
|
1278
|
+
results.push({ taskKey: task.task.key, status: "failed", notes: message });
|
|
1279
|
+
taskStatus = "failed";
|
|
1280
|
+
await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
|
|
1281
|
+
continue;
|
|
1282
|
+
}
|
|
1283
|
+
}
|
|
1284
|
+
await startPhase("context", { allowedFiles, tests: testCommands });
|
|
1285
|
+
const docLinks = Array.isArray(metadata.doc_links) ? metadata.doc_links : [];
|
|
1286
|
+
const { summary: docSummary, warnings: docWarnings } = await this.gatherDocContext(request.projectKey, docLinks);
|
|
1287
|
+
if (docWarnings.length) {
|
|
1288
|
+
warnings.push(...docWarnings);
|
|
1289
|
+
await this.logTask(taskRun.id, docWarnings.join("; "), "docdex");
|
|
1290
|
+
}
|
|
1291
|
+
await endPhase("context", { docWarnings, docSummary: Boolean(docSummary) });
|
|
1292
|
+
await startPhase("prompt", { docSummary: Boolean(docSummary), agent: agent.id });
|
|
1293
|
+
const conflictNote = mergeConflicts.length
|
|
1294
|
+
? `Merge conflicts detected in: ${mergeConflicts.join(", ")}. Resolve these conflicts before any other task work. Remove conflict markers and ensure the files are consistent.`
|
|
1295
|
+
: "";
|
|
1296
|
+
const promptBase = this.buildPrompt(task, docSummary, allowedFiles);
|
|
1297
|
+
const notes = [remoteSyncNote, conflictNote].filter(Boolean).join("\n");
|
|
1298
|
+
const prompt = notes ? `${notes}\n\n${promptBase}` : promptBase;
|
|
1299
|
+
const commandPrompt = prompts.commandPrompt ?? "";
|
|
1300
|
+
const systemPrompt = [prompts.jobPrompt, prompts.characterPrompt, commandPrompt].filter(Boolean).join("\n\n");
|
|
1301
|
+
await this.logTask(taskRun.id, `System prompt:\n${systemPrompt || "(none)"}`, "prompt");
|
|
1302
|
+
await this.logTask(taskRun.id, `Task prompt:\n${prompt}`, "prompt");
|
|
1303
|
+
promptEstimateBase = estimateTokens(systemPrompt + prompt);
|
|
1304
|
+
await endPhase("prompt", { hasSystemPrompt: Boolean(systemPrompt) });
|
|
1305
|
+
if (request.dryRun) {
|
|
1306
|
+
await this.logTask(taskRun.id, "Dry-run enabled; skipping execution.", "execution");
|
|
1307
|
+
await this.deps.workspaceRepo.updateTaskRun(taskRun.id, {
|
|
1308
|
+
status: "cancelled",
|
|
1309
|
+
finishedAt: new Date().toISOString(),
|
|
1310
|
+
});
|
|
1311
|
+
results.push({ taskKey: task.task.key, status: "skipped", notes: "dry_run" });
|
|
1312
|
+
taskStatus = "skipped";
|
|
1313
|
+
await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
|
|
1314
|
+
continue;
|
|
1315
|
+
}
|
|
1316
|
+
try {
|
|
1317
|
+
await this.stateService.transitionToInProgress(task.task);
|
|
1318
|
+
}
|
|
1319
|
+
catch (error) {
|
|
1320
|
+
await this.logTask(taskRun.id, `Failed to move task to in_progress: ${error.message}`, "state");
|
|
1321
|
+
}
|
|
1322
|
+
// Forward one chunk of agent output. Empty/undefined chunks are dropped.
// A caller-supplied onAgentChunk callback takes priority; otherwise the
// chunk is echoed to stdout when live streaming is enabled.
const streamChunk = (text) => {
    if (!text) {
        return;
    }
    if (request.onAgentChunk) {
        request.onAgentChunk(text);
    }
    else if (agentStream) {
        process.stdout.write(text);
    }
};
|
|
1333
|
+
// Run the agent exactly once over `input`, keeping the task lock alive for
// the whole invocation. Two paths:
//   1. streaming (agentStream + invokeStream available): chunks are
//      forwarded/logged as they arrive, with a lock heartbeat both on a
//      timer and inline after each chunk;
//   2. non-streaming: a single invoke() raced against a promise that is
//      rejected if a timer-driven heartbeat detects lock loss.
// Either path throws "Task lock lost ..." on lock loss; the caller treats
// that message specially. Returns the full output plus wall-clock duration.
const invokeAgentOnce = async (input, phaseLabel) => {
    let output = "";
    const started = Date.now();
    if (agentStream && this.deps.agentService.invokeStream) {
        const stream = await this.deps.agentService.invokeStream(agent.id, {
            input,
            metadata: { taskKey: task.task.key },
        });
        let pollLockLost = false;
        // Background heartbeat: only records loss; the inline check below
        // is what actually aborts the stream.
        const refreshTimer = setInterval(() => {
            void refreshLock("agent_stream_poll").then((ok) => {
                if (!ok)
                    pollLockLost = true;
            });
        }, getLockRefreshIntervalMs());
        try {
            for await (const chunk of stream) {
                output += chunk.output ?? "";
                streamChunk(chunk.output);
                await this.logTask(taskRun.id, chunk.output ?? "", phaseLabel);
                // Inline (throttled) heartbeat so long chunk gaps cannot
                // silently outlive the lock TTL.
                if (!(await refreshLock("agent_stream"))) {
                    await this.logTask(taskRun.id, "Aborting task: lock lost during agent streaming.", "vcs");
                    throw new Error("Task lock lost during agent stream.");
                }
            }
        }
        finally {
            clearInterval(refreshTimer);
        }
        // Loss observed by the timer between chunks surfaces here, after
        // the stream has drained.
        if (pollLockLost) {
            await this.logTask(taskRun.id, "Aborting task: lock lost during agent stream.", "vcs");
            throw new Error("Task lock lost during agent stream.");
        }
    }
    else {
        let pollLockLost = false;
        let rejectLockLost = null;
        // Promise that only ever rejects — used purely to break out of the
        // race below when the heartbeat detects lock loss.
        const lockLostPromise = new Promise((_, reject) => {
            rejectLockLost = reject;
        });
        const refreshTimer = setInterval(() => {
            void refreshLock("agent_poll").then((ok) => {
                if (ok || pollLockLost)
                    return;
                pollLockLost = true;
                if (rejectLockLost)
                    rejectLockLost(new Error("Task lock lost during agent invoke."));
            });
        }, getLockRefreshIntervalMs());
        // If we already decided the lock is lost, swallow the invoke error
        // so the lock-loss error (not the agent error) is what propagates.
        const invokePromise = this.deps.agentService
            .invoke(agent.id, { input, metadata: { taskKey: task.task.key } })
            .catch((error) => {
            if (pollLockLost)
                return null;
            throw error;
        });
        try {
            const result = await Promise.race([invokePromise, lockLostPromise]);
            if (result) {
                output = result.output ?? "";
            }
        }
        finally {
            clearInterval(refreshTimer);
        }
        // Covers the window where loss was flagged but the race had already
        // settled with the invoke result.
        if (pollLockLost) {
            await this.logTask(taskRun.id, "Aborting task: lock lost during agent invoke.", "vcs");
            throw new Error("Task lock lost during agent invoke.");
        }
        streamChunk(output);
        await this.logTask(taskRun.id, output, phaseLabel);
    }
    return { output, durationSeconds: (Date.now() - started) / 1000 };
};
|
|
1407
|
+
let agentOutput = "";
|
|
1408
|
+
let agentDuration = 0;
|
|
1409
|
+
let triedRetry = false;
|
|
1410
|
+
const agentInput = `${systemPrompt}\n\n${prompt}`;
|
|
1411
|
+
try {
|
|
1412
|
+
await startPhase("agent", { agent: agent.id, stream: agentStream });
|
|
1413
|
+
const first = await invokeAgentOnce(agentInput, "agent");
|
|
1414
|
+
agentOutput = first.output;
|
|
1415
|
+
agentDuration = first.durationSeconds;
|
|
1416
|
+
await endPhase("agent", { agentDurationSeconds: agentDuration });
|
|
1417
|
+
if (!(await refreshLock("agent"))) {
|
|
1418
|
+
await this.logTask(taskRun.id, "Aborting task: lock lost after agent completion.", "vcs");
|
|
1419
|
+
throw new Error("Task lock lost after agent completion.");
|
|
1420
|
+
}
|
|
1421
|
+
}
|
|
1422
|
+
catch (error) {
|
|
1423
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
1424
|
+
if (/task lock lost/i.test(message)) {
|
|
1425
|
+
throw error;
|
|
1426
|
+
}
|
|
1427
|
+
await this.logTask(taskRun.id, `Agent invocation failed: ${message}`, "agent");
|
|
1428
|
+
await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "agent", "error", { error: message });
|
|
1429
|
+
await this.deps.workspaceRepo.updateTaskRun(taskRun.id, {
|
|
1430
|
+
status: "failed",
|
|
1431
|
+
finishedAt: new Date().toISOString(),
|
|
1432
|
+
});
|
|
1433
|
+
results.push({ taskKey: task.task.key, status: "failed", notes: message });
|
|
1434
|
+
taskStatus = "failed";
|
|
1435
|
+
await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
|
|
1436
|
+
continue;
|
|
1437
|
+
}
|
|
1438
|
+
// Estimate prompt/completion token counts for one agent invocation, fold
// them into the run-level accumulators, and persist a usage record tied to
// the agent, job, command run, task run, task, and project.
const recordUsage = async (phase, output, durationSeconds, promptText) => {
    const tokensIn = estimateTokens(promptText);
    const tokensOut = estimateTokens(output);
    tokensPromptTotal += tokensIn;
    tokensCompletionTotal += tokensOut;
    promptEstimateTotal += tokensIn;
    const usageRecord = {
        agentId: agent.id,
        model: agent.defaultModel,
        jobId: job.id,
        commandRunId: commandRun.id,
        taskRunId: taskRun.id,
        taskId: task.task.id,
        projectId: selection.project?.id,
        tokensPrompt: tokensIn,
        tokensCompletion: tokensOut,
        phase,
        durationSeconds,
    };
    await this.recordTokenUsage(usageRecord);
};
|
|
1458
|
+
await recordUsage("agent", agentOutput, agentDuration, agentInput);
|
|
1459
|
+
let patches = extractPatches(agentOutput);
|
|
1460
|
+
let fileBlocks = extractFileBlocks(agentOutput);
|
|
1461
|
+
if (patches.length === 0 && fileBlocks.length === 0 && !triedRetry) {
|
|
1462
|
+
triedRetry = true;
|
|
1463
|
+
await this.logTask(taskRun.id, "Agent output did not include a patch or file blocks; retrying with explicit output instructions.", "agent");
|
|
1464
|
+
try {
|
|
1465
|
+
const retryInput = `${systemPrompt}\n\n${prompt}\n\nOutput only code changes. If editing existing files, output a unified diff inside \`\`\`patch\`\`\` fences. If creating new files, output FILE blocks in this format:\nFILE: path/to/file.ext\n\`\`\`\n<full file contents>\n\`\`\`\nDo not include analysis or narration.`;
|
|
1466
|
+
const retry = await invokeAgentOnce(retryInput, "agent");
|
|
1467
|
+
agentOutput = retry.output;
|
|
1468
|
+
agentDuration += retry.durationSeconds;
|
|
1469
|
+
await recordUsage("agent_retry", retry.output, retry.durationSeconds, retryInput);
|
|
1470
|
+
patches = extractPatches(agentOutput);
|
|
1471
|
+
fileBlocks = extractFileBlocks(agentOutput);
|
|
1472
|
+
}
|
|
1473
|
+
catch (error) {
|
|
1474
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
1475
|
+
await this.logTask(taskRun.id, `Agent retry failed: ${message}`, "agent");
|
|
1476
|
+
}
|
|
1477
|
+
}
|
|
1478
|
+
if (patches.length === 0 && fileBlocks.length === 0) {
|
|
1479
|
+
const message = "Agent output did not include a patch or file blocks.";
|
|
1480
|
+
await this.logTask(taskRun.id, message, "agent");
|
|
1481
|
+
await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
|
|
1482
|
+
await this.stateService.markBlocked(task.task, "missing_patch");
|
|
1483
|
+
results.push({ taskKey: task.task.key, status: "failed", notes: "missing_patch" });
|
|
1484
|
+
taskStatus = "failed";
|
|
1485
|
+
await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
|
|
1486
|
+
continue;
|
|
1487
|
+
}
|
|
1488
|
+
if (patches.length || fileBlocks.length) {
|
|
1489
|
+
if (!(await refreshLock("apply_start", true))) {
|
|
1490
|
+
await this.logTask(taskRun.id, "Aborting task: lock lost before apply.", "vcs");
|
|
1491
|
+
throw new Error("Task lock lost before apply.");
|
|
1492
|
+
}
|
|
1493
|
+
const applyDetails = {};
|
|
1494
|
+
if (patches.length)
|
|
1495
|
+
applyDetails.patchCount = patches.length;
|
|
1496
|
+
if (fileBlocks.length)
|
|
1497
|
+
applyDetails.fileCount = fileBlocks.length;
|
|
1498
|
+
if (fileBlocks.length && !patches.length)
|
|
1499
|
+
applyDetails.mode = "direct";
|
|
1500
|
+
await startPhase("apply", applyDetails);
|
|
1501
|
+
let patchApplyError = null;
|
|
1502
|
+
if (patches.length) {
|
|
1503
|
+
const applied = await this.applyPatches(patches, this.workspace.workspaceRoot, request.dryRun ?? false);
|
|
1504
|
+
touched = applied.touched;
|
|
1505
|
+
if (applied.warnings?.length) {
|
|
1506
|
+
await this.logTask(taskRun.id, applied.warnings.join("; "), "patch");
|
|
1507
|
+
}
|
|
1508
|
+
if (applied.error) {
|
|
1509
|
+
patchApplyError = applied.error;
|
|
1510
|
+
await this.logTask(taskRun.id, `Patch apply failed: ${applied.error}`, "patch");
|
|
1511
|
+
if (!fileBlocks.length) {
|
|
1512
|
+
await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "apply", "error", { error: applied.error });
|
|
1513
|
+
await this.stateService.markBlocked(task.task, "patch_failed");
|
|
1514
|
+
await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
|
|
1515
|
+
results.push({ taskKey: task.task.key, status: "failed", notes: "patch_failed" });
|
|
1516
|
+
taskStatus = "failed";
|
|
1517
|
+
if (!request.dryRun && request.noCommit !== true) {
|
|
1518
|
+
await this.commitPendingChanges(branchInfo, task.task.key, task.task.title, "auto-save (patch_failed)", task.task.id, taskRun.id);
|
|
1519
|
+
}
|
|
1520
|
+
await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
|
|
1521
|
+
continue;
|
|
1522
|
+
}
|
|
1523
|
+
}
|
|
1524
|
+
}
|
|
1525
|
+
if (fileBlocks.length) {
|
|
1526
|
+
const allowNoop = patchApplyError === null && touched.length > 0;
|
|
1527
|
+
const applied = await this.applyFileBlocks(fileBlocks, this.workspace.workspaceRoot, request.dryRun ?? false, allowNoop);
|
|
1528
|
+
if (applied.touched.length) {
|
|
1529
|
+
const merged = new Set([...touched, ...applied.touched]);
|
|
1530
|
+
touched = Array.from(merged);
|
|
1531
|
+
}
|
|
1532
|
+
if (applied.warnings?.length) {
|
|
1533
|
+
await this.logTask(taskRun.id, applied.warnings.join("; "), "patch");
|
|
1534
|
+
}
|
|
1535
|
+
if (applied.error) {
|
|
1536
|
+
await this.logTask(taskRun.id, `Direct file apply failed: ${applied.error}`, "patch");
|
|
1537
|
+
await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "apply", "error", { error: applied.error });
|
|
1538
|
+
await this.stateService.markBlocked(task.task, "patch_failed");
|
|
1539
|
+
await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
|
|
1540
|
+
results.push({ taskKey: task.task.key, status: "failed", notes: "patch_failed" });
|
|
1541
|
+
taskStatus = "failed";
|
|
1542
|
+
if (!request.dryRun && request.noCommit !== true) {
|
|
1543
|
+
await this.commitPendingChanges(branchInfo, task.task.key, task.task.title, "auto-save (patch_failed)", task.task.id, taskRun.id);
|
|
1544
|
+
}
|
|
1545
|
+
await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
|
|
1546
|
+
continue;
|
|
1547
|
+
}
|
|
1548
|
+
if (patchApplyError && applied.appliedCount > 0) {
|
|
1549
|
+
await this.logTask(taskRun.id, `Patch apply skipped; continued with file blocks. Reason: ${patchApplyError}`, "patch");
|
|
1550
|
+
patchApplyError = null;
|
|
1551
|
+
}
|
|
1552
|
+
}
|
|
1553
|
+
if (patchApplyError) {
|
|
1554
|
+
await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "apply", "error", { error: patchApplyError });
|
|
1555
|
+
await this.stateService.markBlocked(task.task, "patch_failed");
|
|
1556
|
+
await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
|
|
1557
|
+
results.push({ taskKey: task.task.key, status: "failed", notes: "patch_failed" });
|
|
1558
|
+
taskStatus = "failed";
|
|
1559
|
+
if (!request.dryRun && request.noCommit !== true) {
|
|
1560
|
+
await this.commitPendingChanges(branchInfo, task.task.key, task.task.title, "auto-save (patch_failed)", task.task.id, taskRun.id);
|
|
1561
|
+
}
|
|
1562
|
+
await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
|
|
1563
|
+
continue;
|
|
1564
|
+
}
|
|
1565
|
+
patchApplied = touched.length > 0;
|
|
1566
|
+
await endPhase("apply", { touched });
|
|
1567
|
+
if (!(await refreshLock("apply"))) {
|
|
1568
|
+
await this.logTask(taskRun.id, "Aborting task: lock lost after apply.", "vcs");
|
|
1569
|
+
throw new Error("Task lock lost after apply.");
|
|
1570
|
+
}
|
|
1571
|
+
}
|
|
1572
|
+
if (patchApplied && allowedFiles.length) {
|
|
1573
|
+
const dirtyAfterApply = (await this.vcs.dirtyPaths(this.workspace.workspaceRoot)).filter((p) => !p.startsWith(".mcoda"));
|
|
1574
|
+
const scopeCheck = this.validateScope(allowedFiles, normalizePaths(this.workspace.workspaceRoot, dirtyAfterApply));
|
|
1575
|
+
if (!scopeCheck.ok) {
|
|
1576
|
+
await this.logTask(taskRun.id, scopeCheck.message ?? "Scope violation", "scope");
|
|
1577
|
+
await this.stateService.markBlocked(task.task, "scope_violation");
|
|
1578
|
+
await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
|
|
1579
|
+
results.push({ taskKey: task.task.key, status: "failed", notes: "scope_violation" });
|
|
1580
|
+
taskStatus = "failed";
|
|
1581
|
+
if (!request.dryRun && request.noCommit !== true && patchApplied) {
|
|
1582
|
+
await this.commitPendingChanges(branchInfo, task.task.key, task.task.title, "auto-save (scope_violation)", task.task.id, taskRun.id);
|
|
1583
|
+
}
|
|
1584
|
+
await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
|
|
1585
|
+
continue;
|
|
1586
|
+
}
|
|
1587
|
+
}
|
|
1588
|
+
if (!request.dryRun && testCommands.length && patchApplied) {
|
|
1589
|
+
await startPhase("tests", { commands: testCommands });
|
|
1590
|
+
const testResult = await this.runTests(testCommands, this.workspace.workspaceRoot);
|
|
1591
|
+
await this.logTask(taskRun.id, "Test results", "tests", { results: testResult.results });
|
|
1592
|
+
if (!testResult.ok) {
|
|
1593
|
+
await this.logTask(taskRun.id, "Tests failed; continuing task run with warnings.", "tests");
|
|
1594
|
+
softFailures.push("tests_failed");
|
|
1595
|
+
await endPhase("tests", { results: testResult.results, warning: "tests_failed" });
|
|
1596
|
+
}
|
|
1597
|
+
else {
|
|
1598
|
+
await endPhase("tests", { results: testResult.results });
|
|
1599
|
+
}
|
|
1600
|
+
if (!(await refreshLock("tests"))) {
|
|
1601
|
+
await this.logTask(taskRun.id, "Aborting task: lock lost after tests.", "vcs");
|
|
1602
|
+
throw new Error("Task lock lost after tests.");
|
|
1603
|
+
}
|
|
1604
|
+
}
|
|
1605
|
+
if (!request.dryRun && request.noCommit !== true) {
|
|
1606
|
+
if (!(await refreshLock("vcs_start", true))) {
|
|
1607
|
+
await this.logTask(taskRun.id, "Aborting task: lock lost before VCS phase.", "vcs");
|
|
1608
|
+
throw new Error("Task lock lost before VCS phase.");
|
|
1609
|
+
}
|
|
1610
|
+
await startPhase("vcs", { branch: branchInfo.branch, base: branchInfo.base });
|
|
1611
|
+
try {
|
|
1612
|
+
const dirty = (await this.vcs.dirtyPaths(this.workspace.workspaceRoot)).filter((p) => !p.startsWith(".mcoda"));
|
|
1613
|
+
const toStage = dirty.length ? dirty : touched.length ? touched : ["."];
|
|
1614
|
+
await this.vcs.stage(this.workspace.workspaceRoot, toStage);
|
|
1615
|
+
const status = await this.vcs.status(this.workspace.workspaceRoot);
|
|
1616
|
+
const hasChanges = status.trim().length > 0;
|
|
1617
|
+
if (hasChanges) {
|
|
1618
|
+
const commitMessage = `[${task.task.key}] ${task.task.title}`;
|
|
1619
|
+
let committed = false;
|
|
1620
|
+
try {
|
|
1621
|
+
await this.vcs.commit(this.workspace.workspaceRoot, commitMessage);
|
|
1622
|
+
committed = true;
|
|
1623
|
+
}
|
|
1624
|
+
catch (error) {
|
|
1625
|
+
const errorText = this.formatGitError(error);
|
|
1626
|
+
const hookFailure = this.isCommitHookFailure(errorText);
|
|
1627
|
+
const gpgFailure = this.isGpgSignFailure(errorText);
|
|
1628
|
+
if (hookFailure || gpgFailure) {
|
|
1629
|
+
const guidance = [
|
|
1630
|
+
hookFailure
|
|
1631
|
+
? "Commit hook failed; run hooks manually or configure bypass (e.g., HUSKY=0) if policy allows."
|
|
1632
|
+
: "",
|
|
1633
|
+
gpgFailure
|
|
1634
|
+
? "GPG signing failed; configure signing key or disable commit.gpgsign for this repo."
|
|
1635
|
+
: "",
|
|
1636
|
+
]
|
|
1637
|
+
.filter(Boolean)
|
|
1638
|
+
.join(" ");
|
|
1639
|
+
await this.logTask(taskRun.id, `Commit failed; retrying with bypass flags. ${guidance}`, "vcs", {
|
|
1640
|
+
error: errorText,
|
|
1641
|
+
});
|
|
1642
|
+
try {
|
|
1643
|
+
await this.vcs.commit(this.workspace.workspaceRoot, commitMessage, {
|
|
1644
|
+
noVerify: hookFailure,
|
|
1645
|
+
noGpgSign: gpgFailure,
|
|
1646
|
+
});
|
|
1647
|
+
committed = true;
|
|
1648
|
+
await this.logTask(taskRun.id, "Commit succeeded after bypassing hook/signing checks.", "vcs");
|
|
1649
|
+
}
|
|
1650
|
+
catch (retryError) {
|
|
1651
|
+
const retryText = this.formatGitError(retryError);
|
|
1652
|
+
throw new Error(`Commit failed after retry: ${retryText}`);
|
|
1653
|
+
}
|
|
1654
|
+
}
|
|
1655
|
+
else {
|
|
1656
|
+
throw error;
|
|
1657
|
+
}
|
|
1658
|
+
}
|
|
1659
|
+
if (committed) {
|
|
1660
|
+
const head = await this.vcs.lastCommitSha(this.workspace.workspaceRoot);
|
|
1661
|
+
await this.deps.workspaceRepo.updateTask(task.task.id, { vcsLastCommitSha: head });
|
|
1662
|
+
await this.logTask(taskRun.id, `Committed changes (${head})`, "vcs");
|
|
1663
|
+
headSha = head;
|
|
1664
|
+
}
|
|
1665
|
+
}
|
|
1666
|
+
else {
|
|
1667
|
+
await this.logTask(taskRun.id, "No changes to commit.", "vcs");
|
|
1668
|
+
}
|
|
1669
|
+
// Always merge back into base and end on base branch.
|
|
1670
|
+
try {
|
|
1671
|
+
await this.vcs.merge(this.workspace.workspaceRoot, branchInfo.branch, branchInfo.base);
|
|
1672
|
+
mergeStatus = "merged";
|
|
1673
|
+
try {
|
|
1674
|
+
headSha = await this.vcs.lastCommitSha(this.workspace.workspaceRoot);
|
|
1675
|
+
}
|
|
1676
|
+
catch {
|
|
1677
|
+
// Best-effort head capture.
|
|
1678
|
+
}
|
|
1679
|
+
await this.logTask(taskRun.id, `Merged ${branchInfo.branch} into ${branchInfo.base}`, "vcs");
|
|
1680
|
+
if (!(await refreshLock("vcs_merge"))) {
|
|
1681
|
+
await this.logTask(taskRun.id, "Aborting task: lock lost after merge.", "vcs");
|
|
1682
|
+
throw new Error("Task lock lost after merge.");
|
|
1683
|
+
}
|
|
1684
|
+
}
|
|
1685
|
+
catch (error) {
|
|
1686
|
+
mergeStatus = "failed";
|
|
1687
|
+
const conflicts = await this.vcs.conflictPaths(this.workspace.workspaceRoot);
|
|
1688
|
+
if (conflicts.length) {
|
|
1689
|
+
await this.logTask(taskRun.id, `Merge conflicts while merging ${branchInfo.branch} into ${branchInfo.base}.`, "vcs", {
|
|
1690
|
+
conflicts,
|
|
1691
|
+
});
|
|
1692
|
+
throw new Error(`Merge conflict(s) while merging ${branchInfo.branch} into ${branchInfo.base}: ${conflicts.join(", ")}`);
|
|
1693
|
+
}
|
|
1694
|
+
throw error;
|
|
1695
|
+
}
|
|
1696
|
+
if (await this.vcs.hasRemote(this.workspace.workspaceRoot)) {
|
|
1697
|
+
const branchPush = await this.pushWithRecovery(taskRun.id, branchInfo.branch);
|
|
1698
|
+
if (branchPush.pushed) {
|
|
1699
|
+
await this.logTask(taskRun.id, "Pushed branch to remote origin", "vcs");
|
|
1700
|
+
}
|
|
1701
|
+
else if (branchPush.skipped) {
|
|
1702
|
+
await this.logTask(taskRun.id, "Skipped pushing branch to remote origin due to permissions/protection.", "vcs");
|
|
1703
|
+
}
|
|
1704
|
+
if (!(await refreshLock("vcs_push_branch"))) {
|
|
1705
|
+
await this.logTask(taskRun.id, "Aborting task: lock lost after pushing branch.", "vcs");
|
|
1706
|
+
throw new Error("Task lock lost after pushing branch.");
|
|
1707
|
+
}
|
|
1708
|
+
const basePush = await this.pushWithRecovery(taskRun.id, branchInfo.base);
|
|
1709
|
+
if (basePush.pushed) {
|
|
1710
|
+
await this.logTask(taskRun.id, `Pushed base branch ${branchInfo.base} to remote origin`, "vcs");
|
|
1711
|
+
}
|
|
1712
|
+
else if (basePush.skipped) {
|
|
1713
|
+
await this.logTask(taskRun.id, `Skipped pushing base branch ${branchInfo.base} due to permissions/protection.`, "vcs");
|
|
1714
|
+
}
|
|
1715
|
+
if (!(await refreshLock("vcs_push_base"))) {
|
|
1716
|
+
await this.logTask(taskRun.id, "Aborting task: lock lost after pushing base branch.", "vcs");
|
|
1717
|
+
throw new Error("Task lock lost after pushing base branch.");
|
|
1718
|
+
}
|
|
1719
|
+
}
|
|
1720
|
+
else {
|
|
1721
|
+
await this.logTask(taskRun.id, "No remote configured; merge completed locally.", "vcs");
|
|
1722
|
+
}
|
|
1723
|
+
}
|
|
1724
|
+
catch (error) {
|
|
1725
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
1726
|
+
if (/task lock lost/i.test(message)) {
|
|
1727
|
+
throw error;
|
|
1728
|
+
}
|
|
1729
|
+
await this.logTask(taskRun.id, `VCS commit/push failed: ${message}`, "vcs");
|
|
1730
|
+
await this.updateTaskPhase(job.id, taskRun.id, task.task.key, "vcs", "error", { error: message });
|
|
1731
|
+
await this.stateService.markBlocked(task.task, "vcs_failed");
|
|
1732
|
+
await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
|
|
1733
|
+
results.push({ taskKey: task.task.key, status: "failed", notes: "vcs_failed" });
|
|
1734
|
+
taskStatus = "failed";
|
|
1735
|
+
await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
|
|
1736
|
+
continue;
|
|
1737
|
+
}
|
|
1738
|
+
await endPhase("vcs", { branch: branchInfo.branch, base: branchInfo.base });
|
|
1739
|
+
}
|
|
1740
|
+
else if (request.dryRun) {
|
|
1741
|
+
await this.logTask(taskRun.id, "Dry-run: skipped commit/push.", "vcs");
|
|
1742
|
+
}
|
|
1743
|
+
else if (request.noCommit) {
|
|
1744
|
+
await this.logTask(taskRun.id, "no-commit set: skipped commit/push.", "vcs");
|
|
1745
|
+
}
|
|
1746
|
+
await startPhase("finalize");
|
|
1747
|
+
const finishedAt = new Date().toISOString();
|
|
1748
|
+
const elapsedSeconds = Math.max(1, (Date.parse(finishedAt) - Date.parse(startedAt)) / 1000);
|
|
1749
|
+
const spPerHour = task.task.storyPoints && task.task.storyPoints > 0 ? (task.task.storyPoints / elapsedSeconds) * 3600 : null;
|
|
1750
|
+
const reviewMetadata = { last_run: finishedAt };
|
|
1751
|
+
if (softFailures.length) {
|
|
1752
|
+
reviewMetadata.soft_failures = softFailures;
|
|
1753
|
+
}
|
|
1754
|
+
await this.stateService.markReadyToReview(task.task, reviewMetadata);
|
|
1755
|
+
await this.deps.workspaceRepo.updateTaskRun(taskRun.id, {
|
|
1756
|
+
status: "succeeded",
|
|
1757
|
+
finishedAt,
|
|
1758
|
+
spPerHourEffective: spPerHour,
|
|
1759
|
+
gitBranch: branchInfo.branch,
|
|
1760
|
+
gitBaseBranch: branchInfo.base,
|
|
1761
|
+
});
|
|
1762
|
+
storyPointsProcessed += task.task.storyPoints ?? 0;
|
|
1763
|
+
await endPhase("finalize", { spPerHour: spPerHour ?? undefined });
|
|
1764
|
+
const resultNotes = softFailures.length ? `ready_to_review_with_warnings:${softFailures.join(",")}` : "ready_to_review";
|
|
1765
|
+
taskStatus = "succeeded";
|
|
1766
|
+
results.push({
|
|
1767
|
+
taskKey: task.task.key,
|
|
1768
|
+
status: "succeeded",
|
|
1769
|
+
notes: resultNotes,
|
|
1770
|
+
branch: branchInfo.branch,
|
|
1771
|
+
});
|
|
1772
|
+
await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
|
|
1773
|
+
await this.checkpoint(job.id, "task_ready_for_review", { taskKey: task.task.key });
|
|
1774
|
+
}
|
|
1775
|
+
catch (error) {
|
|
1776
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
1777
|
+
if (/task lock lost/i.test(message)) {
|
|
1778
|
+
await this.logTask(taskRun.id, `Task aborted: ${message}`, "vcs");
|
|
1779
|
+
await this.deps.workspaceRepo.updateTaskRun(taskRun.id, { status: "failed", finishedAt: new Date().toISOString() });
|
|
1780
|
+
await this.stateService.markBlocked(task.task, "task_lock_lost");
|
|
1781
|
+
if (!request.dryRun && request.noCommit !== true) {
|
|
1782
|
+
await this.commitPendingChanges(branchInfo, task.task.key, task.task.title, "auto-save (lock_lost)", task.task.id, taskRun.id);
|
|
1783
|
+
}
|
|
1784
|
+
results.push({ taskKey: task.task.key, status: "failed", notes: "task_lock_lost" });
|
|
1785
|
+
taskStatus = "failed";
|
|
1786
|
+
await this.deps.jobService.updateJobStatus(job.id, "running", { processedItems: index + 1 });
|
|
1787
|
+
continue;
|
|
1788
|
+
}
|
|
1789
|
+
throw error;
|
|
1790
|
+
}
|
|
1791
|
+
finally {
|
|
1792
|
+
await emitTaskEndOnce();
|
|
1793
|
+
if (lockAcquired) {
|
|
1794
|
+
await this.deps.workspaceRepo.releaseTaskLock(task.task.id, taskRun.id);
|
|
1795
|
+
}
|
|
1796
|
+
}
|
|
1797
|
+
}
|
|
1798
|
+
const failureCount = results.filter((r) => r.status === "failed" || r.status === "blocked").length;
|
|
1799
|
+
const state = failureCount === 0 ? "completed" : failureCount === results.length ? "failed" : "partial";
|
|
1800
|
+
const errorSummary = failureCount ? `${failureCount} task(s) failed or blocked` : undefined;
|
|
1801
|
+
await this.deps.jobService.updateJobStatus(job.id, state, {
|
|
1802
|
+
processedItems: results.length,
|
|
1803
|
+
errorSummary,
|
|
1804
|
+
});
|
|
1805
|
+
await this.deps.jobService.finishCommandRun(commandRun.id, state === "completed" ? "succeeded" : "failed", errorSummary, storyPointsProcessed || undefined);
|
|
1806
|
+
return {
|
|
1807
|
+
jobId: job.id,
|
|
1808
|
+
commandRunId: commandRun.id,
|
|
1809
|
+
selection,
|
|
1810
|
+
results,
|
|
1811
|
+
warnings,
|
|
1812
|
+
};
|
|
1813
|
+
}
|
|
1814
|
+
catch (error) {
|
|
1815
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
1816
|
+
await this.deps.jobService.updateJobStatus(job.id, "failed", {
|
|
1817
|
+
processedItems: undefined,
|
|
1818
|
+
errorSummary: message,
|
|
1819
|
+
});
|
|
1820
|
+
await this.deps.jobService.finishCommandRun(commandRun.id, "failed", message, storyPointsProcessed || undefined);
|
|
1821
|
+
throw error;
|
|
1822
|
+
}
|
|
1823
|
+
finally {
|
|
1824
|
+
// Best-effort return to base branch after processing.
|
|
1825
|
+
try {
|
|
1826
|
+
await this.vcs.checkoutBranch(this.workspace.workspaceRoot, baseBranch);
|
|
1827
|
+
}
|
|
1828
|
+
catch {
|
|
1829
|
+
// ignore if checkout fails (e.g., dirty tree); user can resolve manually.
|
|
1830
|
+
}
|
|
1831
|
+
}
|
|
1832
|
+
}
|
|
1833
|
+
}
|