@mcoda/core 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/LICENSE +21 -0
- package/README.md +9 -0
- package/dist/api/AgentsApi.d.ts +36 -0
- package/dist/api/AgentsApi.d.ts.map +1 -0
- package/dist/api/AgentsApi.js +176 -0
- package/dist/api/QaTasksApi.d.ts +8 -0
- package/dist/api/QaTasksApi.d.ts.map +1 -0
- package/dist/api/QaTasksApi.js +36 -0
- package/dist/api/TasksApi.d.ts +7 -0
- package/dist/api/TasksApi.d.ts.map +1 -0
- package/dist/api/TasksApi.js +34 -0
- package/dist/config/ConfigService.d.ts +3 -0
- package/dist/config/ConfigService.d.ts.map +1 -0
- package/dist/config/ConfigService.js +2 -0
- package/dist/domain/dependencies/Dependency.d.ts +3 -0
- package/dist/domain/dependencies/Dependency.d.ts.map +1 -0
- package/dist/domain/dependencies/Dependency.js +2 -0
- package/dist/domain/epics/Epic.d.ts +3 -0
- package/dist/domain/epics/Epic.d.ts.map +1 -0
- package/dist/domain/epics/Epic.js +2 -0
- package/dist/domain/projects/Project.d.ts +3 -0
- package/dist/domain/projects/Project.d.ts.map +1 -0
- package/dist/domain/projects/Project.js +2 -0
- package/dist/domain/tasks/Task.d.ts +3 -0
- package/dist/domain/tasks/Task.d.ts.map +1 -0
- package/dist/domain/tasks/Task.js +2 -0
- package/dist/domain/userStories/UserStory.d.ts +3 -0
- package/dist/domain/userStories/UserStory.d.ts.map +1 -0
- package/dist/domain/userStories/UserStory.js +2 -0
- package/dist/index.d.ts +28 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +27 -0
- package/dist/prompts/PdrPrompts.d.ts +4 -0
- package/dist/prompts/PdrPrompts.d.ts.map +1 -0
- package/dist/prompts/PdrPrompts.js +21 -0
- package/dist/prompts/PromptLoader.d.ts +3 -0
- package/dist/prompts/PromptLoader.d.ts.map +1 -0
- package/dist/prompts/PromptLoader.js +2 -0
- package/dist/prompts/SdsPrompts.d.ts +5 -0
- package/dist/prompts/SdsPrompts.d.ts.map +1 -0
- package/dist/prompts/SdsPrompts.js +44 -0
- package/dist/services/agents/AgentManagementService.d.ts +3 -0
- package/dist/services/agents/AgentManagementService.d.ts.map +1 -0
- package/dist/services/agents/AgentManagementService.js +2 -0
- package/dist/services/agents/GatewayAgentService.d.ts +92 -0
- package/dist/services/agents/GatewayAgentService.d.ts.map +1 -0
- package/dist/services/agents/GatewayAgentService.js +870 -0
- package/dist/services/agents/RoutingApiClient.d.ts +23 -0
- package/dist/services/agents/RoutingApiClient.d.ts.map +1 -0
- package/dist/services/agents/RoutingApiClient.js +62 -0
- package/dist/services/agents/RoutingService.d.ts +50 -0
- package/dist/services/agents/RoutingService.d.ts.map +1 -0
- package/dist/services/agents/RoutingService.js +386 -0
- package/dist/services/agents/generated/RoutingApiClient.d.ts +21 -0
- package/dist/services/agents/generated/RoutingApiClient.d.ts.map +1 -0
- package/dist/services/agents/generated/RoutingApiClient.js +68 -0
- package/dist/services/backlog/BacklogService.d.ts +98 -0
- package/dist/services/backlog/BacklogService.d.ts.map +1 -0
- package/dist/services/backlog/BacklogService.js +453 -0
- package/dist/services/backlog/TaskOrderingService.d.ts +88 -0
- package/dist/services/backlog/TaskOrderingService.d.ts.map +1 -0
- package/dist/services/backlog/TaskOrderingService.js +675 -0
- package/dist/services/docs/DocsService.d.ts +82 -0
- package/dist/services/docs/DocsService.d.ts.map +1 -0
- package/dist/services/docs/DocsService.js +1631 -0
- package/dist/services/estimate/EstimateService.d.ts +12 -0
- package/dist/services/estimate/EstimateService.d.ts.map +1 -0
- package/dist/services/estimate/EstimateService.js +103 -0
- package/dist/services/estimate/VelocityService.d.ts +19 -0
- package/dist/services/estimate/VelocityService.d.ts.map +1 -0
- package/dist/services/estimate/VelocityService.js +237 -0
- package/dist/services/estimate/types.d.ts +30 -0
- package/dist/services/estimate/types.d.ts.map +1 -0
- package/dist/services/estimate/types.js +1 -0
- package/dist/services/execution/ExecutionService.d.ts +3 -0
- package/dist/services/execution/ExecutionService.d.ts.map +1 -0
- package/dist/services/execution/ExecutionService.js +2 -0
- package/dist/services/execution/QaFollowupService.d.ts +38 -0
- package/dist/services/execution/QaFollowupService.d.ts.map +1 -0
- package/dist/services/execution/QaFollowupService.js +236 -0
- package/dist/services/execution/QaProfileService.d.ts +22 -0
- package/dist/services/execution/QaProfileService.d.ts.map +1 -0
- package/dist/services/execution/QaProfileService.js +142 -0
- package/dist/services/execution/QaTasksService.d.ts +101 -0
- package/dist/services/execution/QaTasksService.d.ts.map +1 -0
- package/dist/services/execution/QaTasksService.js +1117 -0
- package/dist/services/execution/TaskSelectionService.d.ts +50 -0
- package/dist/services/execution/TaskSelectionService.d.ts.map +1 -0
- package/dist/services/execution/TaskSelectionService.js +281 -0
- package/dist/services/execution/TaskStateService.d.ts +19 -0
- package/dist/services/execution/TaskStateService.d.ts.map +1 -0
- package/dist/services/execution/TaskStateService.js +59 -0
- package/dist/services/execution/WorkOnTasksService.d.ts +80 -0
- package/dist/services/execution/WorkOnTasksService.d.ts.map +1 -0
- package/dist/services/execution/WorkOnTasksService.js +1833 -0
- package/dist/services/jobs/JobInsightsService.d.ts +97 -0
- package/dist/services/jobs/JobInsightsService.d.ts.map +1 -0
- package/dist/services/jobs/JobInsightsService.js +263 -0
- package/dist/services/jobs/JobResumeService.d.ts +16 -0
- package/dist/services/jobs/JobResumeService.d.ts.map +1 -0
- package/dist/services/jobs/JobResumeService.js +113 -0
- package/dist/services/jobs/JobService.d.ts +149 -0
- package/dist/services/jobs/JobService.d.ts.map +1 -0
- package/dist/services/jobs/JobService.js +490 -0
- package/dist/services/jobs/JobsApiClient.d.ts +73 -0
- package/dist/services/jobs/JobsApiClient.d.ts.map +1 -0
- package/dist/services/jobs/JobsApiClient.js +67 -0
- package/dist/services/openapi/OpenApiService.d.ts +54 -0
- package/dist/services/openapi/OpenApiService.d.ts.map +1 -0
- package/dist/services/openapi/OpenApiService.js +503 -0
- package/dist/services/planning/CreateTasksService.d.ts +68 -0
- package/dist/services/planning/CreateTasksService.d.ts.map +1 -0
- package/dist/services/planning/CreateTasksService.js +989 -0
- package/dist/services/planning/KeyHelpers.d.ts +5 -0
- package/dist/services/planning/KeyHelpers.d.ts.map +1 -0
- package/dist/services/planning/KeyHelpers.js +62 -0
- package/dist/services/planning/PlanningService.d.ts +3 -0
- package/dist/services/planning/PlanningService.d.ts.map +1 -0
- package/dist/services/planning/PlanningService.js +2 -0
- package/dist/services/planning/RefineTasksService.d.ts +56 -0
- package/dist/services/planning/RefineTasksService.d.ts.map +1 -0
- package/dist/services/planning/RefineTasksService.js +1328 -0
- package/dist/services/review/CodeReviewService.d.ts +103 -0
- package/dist/services/review/CodeReviewService.d.ts.map +1 -0
- package/dist/services/review/CodeReviewService.js +1187 -0
- package/dist/services/system/SystemUpdateService.d.ts +55 -0
- package/dist/services/system/SystemUpdateService.d.ts.map +1 -0
- package/dist/services/system/SystemUpdateService.js +136 -0
- package/dist/services/tasks/TaskApiResolver.d.ts +7 -0
- package/dist/services/tasks/TaskApiResolver.d.ts.map +1 -0
- package/dist/services/tasks/TaskApiResolver.js +41 -0
- package/dist/services/tasks/TaskDetailService.d.ts +106 -0
- package/dist/services/tasks/TaskDetailService.d.ts.map +1 -0
- package/dist/services/tasks/TaskDetailService.js +332 -0
- package/dist/services/telemetry/TelemetryService.d.ts +53 -0
- package/dist/services/telemetry/TelemetryService.d.ts.map +1 -0
- package/dist/services/telemetry/TelemetryService.js +434 -0
- package/dist/workspace/WorkspaceManager.d.ts +35 -0
- package/dist/workspace/WorkspaceManager.d.ts.map +1 -0
- package/dist/workspace/WorkspaceManager.js +201 -0
- package/package.json +45 -0
|
@@ -0,0 +1,989 @@
|
|
|
1
|
+
import path from "node:path";
|
|
2
|
+
import { promises as fs } from "node:fs";
|
|
3
|
+
import { AgentService } from "@mcoda/agents";
|
|
4
|
+
import { GlobalRepository, WorkspaceRepository, } from "@mcoda/db";
|
|
5
|
+
import { setTimeout as delay } from "node:timers/promises";
|
|
6
|
+
import { DocdexClient } from "@mcoda/integrations";
|
|
7
|
+
import { JobService } from "../jobs/JobService.js";
|
|
8
|
+
import { RoutingService } from "../agents/RoutingService.js";
|
|
9
|
+
import { createEpicKeyGenerator, createStoryKeyGenerator, createTaskKeyGenerator, } from "./KeyHelpers.js";
|
|
10
|
+
/**
 * Render a list of strings as markdown bullet lines.
 * Falls back to a single bullet containing `fallback` when the list is
 * missing or empty.
 * @param {string[]|null|undefined} items
 * @param {string} fallback
 * @returns {string} newline-joined "- item" lines
 */
const formatBullets = (items, fallback) => {
    const bullets = (items ?? []).map((item) => `- ${item}`);
    return bullets.length > 0 ? bullets.join("\n") : `- ${fallback}`;
};
|
|
15
|
+
/**
 * Return `value` trimmed when it contains non-whitespace characters,
 * otherwise return `fallback`.
 * @param {string|null|undefined} value
 * @param {string} fallback
 * @returns {string}
 */
const ensureNonEmpty = (value, fallback) => {
    if (!value) {
        return fallback;
    }
    const trimmed = value.trim();
    return trimmed.length > 0 ? trimmed : fallback;
};
|
|
16
|
+
/**
 * Rough token-count estimate for budgeting prompt context:
 * ~4 characters per token, never less than 1.
 * @param {string} text
 * @returns {number}
 */
const estimateTokens = (text) => {
    const approx = Math.ceil(text.length / 4);
    return approx < 1 ? 1 : approx;
};
|
|
17
|
+
// Approximate token budget for the doc-context section of prompts; consumed
// per entry in buildDocContext() using estimateTokens().
const DOC_CONTEXT_BUDGET = 8000;
|
|
18
|
+
/**
 * Infer a coarse document type from a file name.
 * Matches (case-insensitively, in priority order) "sds", "pdr", "rfp"
 * anywhere in the basename; anything else is a generic "DOC".
 * @param {string} filePath
 * @returns {"SDS"|"PDR"|"RFP"|"DOC"}
 */
const inferDocType = (filePath) => {
    const name = path.basename(filePath).toLowerCase();
    for (const docType of ["SDS", "PDR", "RFP"]) {
        if (name.includes(docType.toLowerCase())) {
            return docType;
        }
    }
    return "DOC";
};
|
|
28
|
+
/**
 * One-line (plus optional excerpt) bullet describing a docdex document for
 * inclusion in prompts. Falls back to positional ids when the doc lacks
 * title/path/id.
 * @param {object} doc - docdex document (docType, title?, path?, id?, content?)
 * @param {number} idx - zero-based position, used for fallback ids
 * @returns {string}
 */
const describeDoc = (doc, idx) => {
    const fallbackId = `doc-${idx + 1}`;
    const title = doc.title ?? doc.path ?? doc.id ?? fallbackId;
    const source = doc.path ?? doc.id ?? "docdex";
    // First three lines of content, flattened and capped at 240 chars.
    let head = "";
    if (doc.content) {
        head = doc.content.split(/\r?\n/).slice(0, 3).join(" ").slice(0, 240);
    }
    const excerpt = head ? `\n  Excerpt: ${head}` : "";
    return `- [${doc.docType}] ${title} (handle: docdex:${doc.id ?? fallbackId}, source: ${source})${excerpt}`;
};
|
|
34
|
+
/**
 * Extract the first JSON object embedded in an agent response.
 *
 * Prefers content inside a fenced code block (```json ... ``` or a plain
 * ``` fence) so stray braces in surrounding prose cannot confuse the slice;
 * falls back to the raw text. Within the chosen candidate, parses from the
 * first "{" to the last "}".
 * @param {string} raw - agent output, possibly with prose around the JSON
 * @returns {object|undefined} parsed object, or undefined when none parses
 */
const extractJson = (raw) => {
    // Fix: previously only ```json fences were recognized; a plain ``` fence
    // fell through to raw text, where braces outside the fence broke parsing.
    const fenced = raw.match(/```(?:json)?([\s\S]*?)```/);
    const candidate = fenced ? fenced[1] : raw;
    const start = candidate.indexOf("{");
    const end = candidate.lastIndexOf("}");
    if (start === -1 || end === -1 || end <= start)
        return undefined;
    const body = candidate.slice(start, end + 1);
    try {
        return JSON.parse(body);
    }
    catch {
        // Malformed JSON is an expected outcome; caller retries with a fix prompt.
        return undefined;
    }
};
|
|
49
|
+
/**
 * Render the markdown body for an epic following the SDS epic template.
 * Sections the generator cannot fill carry placeholder bullets to be
 * resolved during refinement.
 * @param {string} epicKey
 * @param {string} title
 * @param {string|undefined} description
 * @param {string[]|undefined} acceptance - acceptance criteria bullets
 * @param {string[]|undefined} relatedDocs - docdex handles / references
 * @returns {string} newline-joined markdown
 */
const buildEpicDescription = (epicKey, title, description, acceptance, relatedDocs) => {
    const lines = [];
    lines.push(`* **Epic Key**: ${epicKey}`);
    lines.push(`* **Epic Title**: ${title}`);
    lines.push("* **Context / Problem**");
    lines.push("");
    lines.push(ensureNonEmpty(description, "Summarize the problem, users, and constraints for this epic."));
    lines.push("* **Goals & Outcomes**");
    lines.push(formatBullets(acceptance, "List measurable outcomes for this epic."));
    lines.push("* **In Scope**");
    lines.push("- Clarify during refinement; derived from RFP/PDR/SDS.");
    lines.push("* **Out of Scope**");
    lines.push("- To be defined; exclude unrelated systems.");
    lines.push("* **Key Flows / Scenarios**");
    lines.push("- Outline primary user flows for this epic.");
    lines.push("* **Non-functional Requirements**");
    lines.push("- Performance, security, reliability expectations go here.");
    lines.push("* **Dependencies & Constraints**");
    lines.push("- Capture upstream/downstream systems and blockers.");
    lines.push("* **Risks & Open Questions**");
    lines.push("- Identify risks and unknowns to resolve.");
    lines.push("* **Acceptance Criteria**");
    lines.push(formatBullets(acceptance, "Provide 5–10 testable acceptance criteria."));
    lines.push("* **Related Documentation / References**");
    lines.push(formatBullets(relatedDocs, "Link relevant docdex entries and sections."));
    return lines.join("\n");
};
|
|
76
|
+
/**
 * Render the markdown body for a user story following the SDS story template.
 * @param {string} storyKey
 * @param {string} title - used in the fallback user-story sentence
 * @param {string|undefined} userStory - "As a ..." sentence
 * @param {string|undefined} description
 * @param {string[]|undefined} acceptanceCriteria
 * @param {string[]|undefined} relatedDocs
 * @returns {string} newline-joined markdown
 */
const buildStoryDescription = (storyKey, title, userStory, description, acceptanceCriteria, relatedDocs) => {
    const lines = [];
    lines.push(`* **Story Key**: ${storyKey}`);
    lines.push("* **User Story**");
    lines.push("");
    lines.push(ensureNonEmpty(userStory, `As a user, I want ${title} so that it delivers value.`));
    lines.push("* **Context**");
    lines.push("");
    lines.push(ensureNonEmpty(description, "Context for systems, dependencies, and scope."));
    lines.push("* **Preconditions / Assumptions**");
    lines.push("- Confirm required data, environments, and access.");
    lines.push("* **Main Flow**");
    lines.push("- Outline the happy path for this story.");
    lines.push("* **Alternative / Error Flows**");
    lines.push("- Capture error handling and non-happy paths.");
    lines.push("* **UX / UI Notes**");
    lines.push("- Enumerate screens/states if applicable.");
    lines.push("* **Data & Integrations**");
    lines.push("- Note key entities, APIs, queues, or third-party dependencies.");
    lines.push("* **Acceptance Criteria**");
    lines.push(formatBullets(acceptanceCriteria, "List testable outcomes for this story."));
    lines.push("* **Non-functional Requirements**");
    lines.push("- Add story-specific performance/reliability/security expectations.");
    lines.push("* **Related Documentation / References**");
    lines.push(formatBullets(relatedDocs, "Docdex handles, OpenAPI endpoints, code modules."));
    return lines.join("\n");
};
|
|
103
|
+
/**
 * Render the markdown body for a task following the SDS task template,
 * anchoring it to its parent story and epic.
 * @param {string} taskKey
 * @param {string} title - used in the fallback objective sentence
 * @param {string|undefined} description
 * @param {string} storyKey
 * @param {string} epicKey
 * @param {string[]|undefined} relatedDocs
 * @param {string[]|undefined} dependencies - prerequisite task keys
 * @returns {string} newline-joined markdown
 */
const buildTaskDescription = (taskKey, title, description, storyKey, epicKey, relatedDocs, dependencies) => {
    const lines = [];
    lines.push(`* **Task Key**: ${taskKey}`);
    lines.push("* **Objective**");
    lines.push("");
    lines.push(ensureNonEmpty(description, `Deliver ${title} for story ${storyKey}.`));
    lines.push("* **Context**");
    lines.push("");
    lines.push(`- Epic: ${epicKey}`);
    lines.push(`- Story: ${storyKey}`);
    lines.push("* **Inputs**");
    lines.push(formatBullets(relatedDocs, "Docdex excerpts, SDS/PDR/RFP sections, OpenAPI endpoints."));
    lines.push("* **Implementation Plan**");
    lines.push("- Break this into concrete steps during execution.");
    lines.push("* **Definition of Done**");
    lines.push("- Tests passing, docs updated, review/QA complete.");
    lines.push("* **Testing & QA**");
    lines.push("- Unit/integration coverage for changed areas.");
    lines.push("* **Dependencies**");
    lines.push(formatBullets(dependencies, "Enumerate prerequisite tasks by key."));
    lines.push("* **Risks & Gotchas**");
    lines.push("- Highlight edge cases or risky areas.");
    lines.push("* **Related Documentation / References**");
    lines.push(formatBullets(relatedDocs, "Docdex handles or file paths to consult."));
    return lines.join("\n");
};
|
|
129
|
+
/**
 * Recursively list all files under `target` (depth-first, directory order as
 * returned by readdir). A non-directory target yields itself.
 *
 * Uses fs.stat (follows symlinks, as before). Fix: the original stat'ed each
 * child in the parent loop and then again inside the recursive call; this
 * version stats each path exactly once by letting the recursion decide.
 * @param {string} target - absolute or cwd-relative path
 * @returns {Promise<string[]>} file paths
 * @throws when a path cannot be stat'ed or read
 */
const collectFilesRecursively = async (target) => {
    const stat = await fs.stat(target);
    if (!stat.isDirectory()) {
        return [target];
    }
    const entries = await fs.readdir(target);
    const results = [];
    for (const entry of entries) {
        // Sequential on purpose: avoids unbounded fd/concurrency fan-out on
        // deep trees; doc trees are small so throughput is not a concern.
        results.push(...(await collectFilesRecursively(path.join(target, entry))));
    }
    return results;
};
|
|
148
|
+
const EPIC_SCHEMA_SNIPPET = `{
|
|
149
|
+
"epics": [
|
|
150
|
+
{
|
|
151
|
+
"localId": "e1",
|
|
152
|
+
"area": "web|adm|bck|ops|infra|mobile",
|
|
153
|
+
"title": "Epic title",
|
|
154
|
+
"description": "Epic description using the epic template",
|
|
155
|
+
"acceptanceCriteria": ["criterion"],
|
|
156
|
+
"relatedDocs": ["docdex:..."],
|
|
157
|
+
"priorityHint": 50
|
|
158
|
+
}
|
|
159
|
+
]
|
|
160
|
+
}`;
|
|
161
|
+
const STORY_SCHEMA_SNIPPET = `{
|
|
162
|
+
"stories": [
|
|
163
|
+
{
|
|
164
|
+
"localId": "us1",
|
|
165
|
+
"title": "Story title",
|
|
166
|
+
"userStory": "As a ...",
|
|
167
|
+
"description": "Story description using the template",
|
|
168
|
+
"acceptanceCriteria": ["criterion"],
|
|
169
|
+
"relatedDocs": ["docdex:..."],
|
|
170
|
+
"priorityHint": 50
|
|
171
|
+
}
|
|
172
|
+
]
|
|
173
|
+
}`;
|
|
174
|
+
const TASK_SCHEMA_SNIPPET = `{
|
|
175
|
+
"tasks": [
|
|
176
|
+
{
|
|
177
|
+
"localId": "t1",
|
|
178
|
+
"title": "Task title",
|
|
179
|
+
"type": "feature|bug|chore|spike",
|
|
180
|
+
"description": "Task description using the template",
|
|
181
|
+
"estimatedStoryPoints": 3,
|
|
182
|
+
"priorityHint": 50,
|
|
183
|
+
"dependsOnKeys": ["t0"],
|
|
184
|
+
"relatedDocs": ["docdex:..."]
|
|
185
|
+
}
|
|
186
|
+
]
|
|
187
|
+
}`;
|
|
188
|
+
export class CreateTasksService {
|
|
189
|
+
constructor(workspace, deps) {
|
|
190
|
+
this.workspace = workspace;
|
|
191
|
+
this.docdex = deps.docdex;
|
|
192
|
+
this.jobService = deps.jobService;
|
|
193
|
+
this.agentService = deps.agentService;
|
|
194
|
+
this.repo = deps.repo;
|
|
195
|
+
this.workspaceRepo = deps.workspaceRepo;
|
|
196
|
+
this.routingService = deps.routingService;
|
|
197
|
+
}
|
|
198
|
+
/**
 * Async factory: builds every collaborator for the given workspace and
 * returns a ready-to-use service. Use this instead of `new` (several
 * dependencies require awaited construction).
 * @param {object} workspace - workspace context (workspaceRoot, config, ...)
 * @returns {Promise<CreateTasksService>}
 */
static async create(workspace) {
    const repo = await GlobalRepository.create();
    const agentService = new AgentService(repo);
    const routingService = await RoutingService.create();
    // Docdex endpoint: workspace config takes precedence over the
    // MCODA_DOCDEX_URL environment variable.
    const docdex = new DocdexClient({
        workspaceRoot: workspace.workspaceRoot,
        baseUrl: workspace.config?.docdexUrl ?? process.env.MCODA_DOCDEX_URL,
    });
    const jobService = new JobService(workspace);
    const workspaceRepo = await WorkspaceRepository.create(workspace.workspaceRoot);
    return new CreateTasksService(workspace, {
        docdex,
        jobService,
        agentService,
        repo,
        workspaceRepo,
        routingService,
    });
}
|
|
217
|
+
async close() {
|
|
218
|
+
const swallow = async (fn) => {
|
|
219
|
+
try {
|
|
220
|
+
if (fn)
|
|
221
|
+
await fn();
|
|
222
|
+
}
|
|
223
|
+
catch {
|
|
224
|
+
// Best-effort close; ignore errors (including "database is closed").
|
|
225
|
+
}
|
|
226
|
+
};
|
|
227
|
+
await swallow(this.agentService.close?.bind(this.agentService));
|
|
228
|
+
await swallow(this.repo.close?.bind(this.repo));
|
|
229
|
+
await swallow(this.jobService.close?.bind(this.jobService));
|
|
230
|
+
await swallow(this.workspaceRepo.close?.bind(this.workspaceRepo));
|
|
231
|
+
await swallow(this.routingService.close?.bind(this.routingService));
|
|
232
|
+
const docdex = this.docdex;
|
|
233
|
+
await swallow(docdex?.close?.bind(docdex));
|
|
234
|
+
}
|
|
235
|
+
async resolveAgent(agentName) {
|
|
236
|
+
const resolved = await this.routingService.resolveAgentForCommand({
|
|
237
|
+
workspace: this.workspace,
|
|
238
|
+
commandName: "create-tasks",
|
|
239
|
+
overrideAgentSlug: agentName,
|
|
240
|
+
});
|
|
241
|
+
return resolved.agent;
|
|
242
|
+
}
|
|
243
|
+
/**
 * Resolve a mixed list of doc inputs into docdex documents.
 *
 * Each input is either a "docdex:<id>" handle (fetched directly) or a file/
 * directory path (relative paths resolve against the workspace root;
 * directories are walked recursively and every file is registered with
 * docdex under an inferred doc type).
 * @param {string[]} inputs - docdex handles and/or filesystem paths
 * @returns {Promise<object[]>} docdex documents, in input order
 * @throws {Error} wrapping the first fetch/read/register failure
 */
async prepareDocs(inputs) {
    const documents = [];
    for (const input of inputs) {
        if (input.startsWith("docdex:")) {
            const docId = input.replace(/^docdex:/, "");
            try {
                const doc = await this.docdex.fetchDocumentById(docId);
                documents.push(doc);
            }
            catch (error) {
                throw new Error(`Docdex reference failed (${docId}): ${error.message}`);
            }
            continue;
        }
        // Filesystem input: resolve relative paths against the workspace root.
        const resolved = path.isAbsolute(input) ? input : path.join(this.workspace.workspaceRoot, input);
        let paths;
        try {
            paths = await collectFilesRecursively(resolved);
        }
        catch (error) {
            throw new Error(`Failed to read input ${input}: ${error.message}`);
        }
        for (const filePath of paths) {
            // Doc type (SDS/PDR/RFP/DOC) is guessed from the file name.
            const docType = inferDocType(filePath);
            try {
                const doc = await this.docdex.ensureRegisteredFromFile(filePath, docType, {
                    projectKey: this.workspace.workspaceId,
                });
                documents.push(doc);
            }
            catch (error) {
                throw new Error(`Docdex register failed for ${filePath}: ${error.message}`);
            }
        }
    }
    return documents;
}
|
|
280
|
+
/**
 * Build a token-budgeted context string from docdex documents.
 *
 * Docs are processed most-recently-updated first. Each doc contributes up to
 * 5 segments (each capped at 600 chars) or, lacking segments, the first 800
 * chars of raw content. Docs whose estimated token cost would exceed the
 * remaining budget are skipped (with a warning) — smaller later docs may
 * still fit.
 * @param {object[]} docs - docdex documents
 * @returns {{docSummary: string, warnings: string[]}}
 */
buildDocContext(docs) {
    const warnings = [];
    const blocks = [];
    let budget = DOC_CONTEXT_BUDGET;
    // Newest first; missing updatedAt sorts last.
    const sorted = [...docs].sort((a, b) => (b.updatedAt ?? "").localeCompare(a.updatedAt ?? ""));
    for (const [idx, doc] of sorted.entries()) {
        const segments = (doc.segments ?? []).slice(0, 5);
        const content = segments.length
            ? segments
                .map((seg, i) => {
                    const trimmed = seg.content.length > 600 ? `${seg.content.slice(0, 600)}...` : seg.content;
                    return ` - (${i + 1}) ${seg.heading ? `${seg.heading}: ` : ""}${trimmed}`;
                })
                .join("\n")
            : doc.content
                ? doc.content.slice(0, 800)
                : "";
        const entry = [`[${doc.docType}] docdex:${doc.id ?? `doc-${idx + 1}`}`, describeDoc(doc, idx), content]
            .filter(Boolean)
            .join("\n");
        const cost = estimateTokens(entry);
        if (budget - cost < 0) {
            // Skip (don't break): a smaller doc later may still fit the budget.
            warnings.push(`Context truncated due to token budget; skipped doc ${doc.id ?? doc.path ?? idx + 1}.`);
            continue;
        }
        budget -= cost;
        blocks.push(entry);
        if (budget <= 0)
            break;
    }
    return { docSummary: blocks.join("\n\n") || "(no docs)", warnings };
}
|
|
312
|
+
/**
 * Build the "epics only" generation prompt for the agent.
 * @param {string} projectKey
 * @param {object[]} docs - docdex documents to summarize in the prompt
 * @param {{maxEpics?: number, maxStoriesPerEpic?: number, maxTasksPerStory?: number}} options
 * @returns {{prompt: string, docSummary: string}} prompt text plus the doc
 *   summary (reused by the story/task generation steps)
 */
buildPrompt(projectKey, docs, options) {
    const docSummary = docs.map((doc, idx) => describeDoc(doc, idx)).join("\n");
    // Optional caps become prompt sentences; unset caps drop out entirely.
    const limits = [
        options.maxEpics ? `Limit epics to ${options.maxEpics}.` : "",
        options.maxStoriesPerEpic ? `Limit stories per epic to ${options.maxStoriesPerEpic}.` : "",
        options.maxTasksPerStory ? `Limit tasks per story to ${options.maxTasksPerStory}.` : "",
    ]
        .filter(Boolean)
        .join(" ");
    const prompt = [
        `You are assisting in creating EPICS ONLY for project ${projectKey}.`,
        "Follow mcoda SDS epic template:",
        "- Context/Problem; Goals & Outcomes; In Scope; Out of Scope; Key Flows; Non-functional Requirements; Dependencies & Constraints; Risks & Open Questions; Acceptance Criteria; Related Documentation.",
        "Return strictly valid JSON (no prose) matching:",
        EPIC_SCHEMA_SNIPPET,
        "Rules:",
        "- Do NOT include final slugs; the system will assign keys.",
        "- Use docdex handles when referencing docs.",
        "- acceptanceCriteria must be an array of strings (5-10 items).",
        limits || "Use reasonable scope without over-generating epics.",
        "Docs available:",
        docSummary || "- (no docs provided; propose sensible epics).",
    ].join("\n\n");
    return { prompt, docSummary };
}
|
|
337
|
+
/**
 * Deterministic seed plan used when agent-driven generation is unavailable:
 * one epic, one story, and two tasks (summarize requirements, then propose
 * tasks/ordering) all referencing the provided docs.
 * @param {string} projectKey - also used as the epic's area
 * @param {object[]} docs - docdex documents; ids/paths/titles become refs
 * @returns {{epics: object[]}} plan in the same nested shape the agent emits
 */
fallbackPlan(projectKey, docs) {
    const docRefs = docs.map((doc) => doc.id ?? doc.path ?? doc.title ?? "doc");
    return {
        epics: [
            {
                // NOTE(review): no localId here, unlike stories/tasks below —
                // downstream assigns `e${idx+1}` when missing; confirm intended.
                area: projectKey,
                title: `Initial planning for ${projectKey}`,
                description: `Seed epic derived from provided documentation (${docRefs.join(", ")})`,
                acceptanceCriteria: ["Backlog created with actionable tasks", "Dependencies identified", "Tasks grouped by user value"],
                relatedDocs: docRefs,
                stories: [
                    {
                        localId: "story-1",
                        title: "Review inputs and draft backlog",
                        userStory: "As a planner, I want a decomposed backlog so that work can be prioritized.",
                        description: "Review provided docs and produce a first-pass backlog.",
                        acceptanceCriteria: [
                            "Epics, stories, and tasks are listed",
                            "Each task has an objective and DoD",
                            "Dependencies noted",
                        ],
                        relatedDocs: docRefs,
                        tasks: [
                            {
                                localId: "task-1",
                                title: "Summarize requirements",
                                type: "chore",
                                description: "Summarize key asks from docs and SDS/PDR/RFP inputs.",
                                estimatedStoryPoints: 1,
                                priorityHint: 10,
                                relatedDocs: docRefs,
                            },
                            {
                                localId: "task-2",
                                title: "Propose tasks and ordering",
                                type: "feature",
                                description: "Break down the scope into tasks with initial dependencies.",
                                estimatedStoryPoints: 2,
                                priorityHint: 20,
                                // task-2 builds on the summary from task-1.
                                dependsOnKeys: ["task-1"],
                                relatedDocs: docRefs,
                            },
                        ],
                    },
                ],
            },
        ],
    };
}
|
|
386
|
+
/**
 * Invoke the agent with a prompt, demand JSON output, and record telemetry.
 *
 * Streams output (mirroring chunks to the job log and, when `stream`, to
 * stdout) or invokes once. If the response is not parseable JSON, issues one
 * "rewrite as JSON" retry with the schema matching `action`. Token counts
 * are estimated (~4 chars/token), not provider-reported.
 * @param {object} agent - resolved agent (id, defaultModel)
 * @param {string} prompt
 * @param {"epics"|"stories"|"tasks"} action - selects the retry schema hint
 *   and tags telemetry as `create_tasks_<action>`
 * @param {boolean} stream - stream tokens and echo to stdout
 * @param {string} jobId
 * @param {string} commandRunId
 * @param {object} [metadata] - extra telemetry metadata
 * @returns {Promise<{output: string, promptTokens: number, completionTokens: number}>}
 * @throws {Error} when invocation fails or output is still not valid JSON
 *   after the retry
 */
async invokeAgentWithRetry(agent, prompt, action, stream, jobId, commandRunId, metadata) {
    const startedAt = Date.now();
    let output = "";
    // Mirror every chunk to the persistent job log; echo to stdout when streaming.
    const logChunk = async (chunk) => {
        if (!chunk)
            return;
        await this.jobService.appendLog(jobId, chunk);
        if (stream)
            process.stdout.write(chunk);
    };
    try {
        if (stream) {
            const gen = await this.agentService.invokeStream(agent.id, { input: prompt });
            for await (const chunk of gen) {
                output += chunk.output ?? "";
                await logChunk(chunk.output);
            }
        }
        else {
            const result = await this.agentService.invoke(agent.id, { input: prompt });
            output = result.output ?? "";
            await logChunk(output);
        }
    }
    catch (error) {
        throw new Error(`Agent invocation failed (${action}): ${error.message}`);
    }
    let parsed = extractJson(output);
    if (!parsed) {
        // One-shot repair: ask the agent to reshape its own output into the
        // schema for this action. The retry is never streamed.
        const fixPrompt = [
            "Rewrite the previous response into valid JSON matching the expected schema.",
            `Schema hint:\n${action === "epics" ? EPIC_SCHEMA_SNIPPET : action === "stories" ? STORY_SCHEMA_SNIPPET : TASK_SCHEMA_SNIPPET}`,
            "Return JSON only; no prose.",
            `Original content:\n${output}`,
        ].join("\n\n");
        try {
            const fix = await this.agentService.invoke(agent.id, { input: fixPrompt });
            output = fix.output ?? "";
            parsed = extractJson(output);
        }
        catch (error) {
            throw new Error(`Agent retry failed (${action}): ${error.message}`);
        }
    }
    if (!parsed) {
        throw new Error(`Agent output was not valid JSON for ${action}`);
    }
    const promptTokens = estimateTokens(prompt);
    const completionTokens = estimateTokens(output);
    const durationSeconds = (Date.now() - startedAt) / 1000;
    await this.jobService.recordTokenUsage({
        timestamp: new Date().toISOString(),
        workspaceId: this.workspace.workspaceId,
        jobId,
        commandRunId,
        agentId: agent.id,
        modelName: agent.defaultModel,
        // NOTE(review): token counts are reported under both naming schemes
        // (promptTokens/tokensPrompt) — presumably for consumer compatibility;
        // confirm before removing either set.
        promptTokens,
        completionTokens,
        tokensPrompt: promptTokens,
        tokensCompletion: completionTokens,
        tokensTotal: promptTokens + completionTokens,
        durationSeconds,
        metadata: { action: `create_tasks_${action}`, ...(metadata ?? {}) },
    });
    return { output, promptTokens, completionTokens };
}
|
|
453
|
+
/**
 * Parse and normalize the agent's epic JSON into plan-epic records: fills
 * fallback localIds (`e1`, `e2`, ...), coerces array/number fields, attaches
 * an empty `stories` list, and drops entries without a title.
 * @param {string} output - raw agent output containing JSON
 * @param {object[]} fallbackDocs - currently unused; kept for interface
 *   compatibility — TODO confirm whether callers rely on it
 * @param {string} projectKey - currently unused; see above
 * @returns {object[]} normalized epics
 * @throws {Error} when the output has no non-empty `epics` array
 */
parseEpics(output, fallbackDocs, projectKey) {
    const parsed = extractJson(output);
    if (!parsed || !Array.isArray(parsed.epics) || parsed.epics.length === 0) {
        throw new Error("Agent did not return epics in expected format");
    }
    return parsed.epics
        .map((epic, idx) => ({
            localId: epic.localId ?? `e${idx + 1}`,
            area: epic.area,
            title: epic.title ?? "Epic",
            description: epic.description,
            acceptanceCriteria: Array.isArray(epic.acceptanceCriteria) ? epic.acceptanceCriteria : [],
            relatedDocs: Array.isArray(epic.relatedDocs) ? epic.relatedDocs : [],
            priorityHint: typeof epic.priorityHint === "number" ? epic.priorityHint : undefined,
            stories: [],
        }))
        // `title` always truthy after the `?? "Epic"` default; filter kept as a
        // defensive guard.
        .filter((e) => e.title);
}
|
|
471
|
+
/**
 * Ask the agent for user stories belonging to one epic and normalize the
 * result (fallback localIds `us1...`, coerced arrays/numbers, empty `tasks`
 * list attached).
 * @param {object} agent - resolved agent
 * @param {object} epic - epic (title, description, key/localId)
 * @param {string} docSummary - doc summary from buildPrompt()/buildDocContext()
 * @param {boolean} stream - stream agent output
 * @param {string} jobId
 * @param {string} commandRunId
 * @returns {Promise<object[]>} normalized stories
 * @throws {Error} when the agent returns no stories
 */
async generateStoriesForEpic(agent, epic, docSummary, stream, jobId, commandRunId) {
    const prompt = [
        `Generate user stories for epic "${epic.title}".`,
        "Use the User Story template: User Story; Context; Preconditions; Main Flow; Alternative/Error Flows; UX/UI; Data & Integrations; Acceptance Criteria; NFR; Related Docs.",
        "Return JSON only matching:",
        STORY_SCHEMA_SNIPPET,
        "Rules:",
        "- No tasks in this step.",
        "- acceptanceCriteria must be an array of strings.",
        "- Use docdex handles when citing docs.",
        `Epic context (key=${epic.key ?? epic.localId ?? "TBD"}):`,
        epic.description ?? "(no description provided)",
        `Docs: ${docSummary || "none"}`,
    ].join("\n\n");
    const { output } = await this.invokeAgentWithRetry(agent, prompt, "stories", stream, jobId, commandRunId, {
        epicKey: epic.key ?? epic.localId,
    });
    const parsed = extractJson(output);
    if (!parsed || !Array.isArray(parsed.stories) || parsed.stories.length === 0) {
        throw new Error(`Agent did not return stories for epic ${epic.title}`);
    }
    return parsed.stories
        .map((story, idx) => ({
            localId: story.localId ?? `us${idx + 1}`,
            title: story.title ?? "Story",
            // Fall back to the description when no explicit "As a ..." sentence.
            userStory: story.userStory ?? story.description,
            description: story.description,
            acceptanceCriteria: Array.isArray(story.acceptanceCriteria) ? story.acceptanceCriteria : [],
            relatedDocs: Array.isArray(story.relatedDocs) ? story.relatedDocs : [],
            priorityHint: typeof story.priorityHint === "number" ? story.priorityHint : undefined,
            tasks: [],
        }))
        .filter((s) => s.title);
}
|
|
505
|
+
/**
 * Ask the agent for tasks belonging to one story and normalize the result
 * (fallback localIds `t1...`, coerced arrays/numbers).
 * @param {object} agent - resolved agent
 * @param {object} epic - parent epic (title, key) for prompt context/telemetry
 * @param {object} story - story (title, description, userStory,
 *   acceptanceCriteria, key/localId)
 * @param {string} docSummary - doc summary string
 * @param {boolean} stream - stream agent output
 * @param {string} jobId
 * @param {string} commandRunId
 * @returns {Promise<object[]>} normalized tasks
 * @throws {Error} when the agent returns no tasks
 */
async generateTasksForStory(agent, epic, story, docSummary, stream, jobId, commandRunId) {
    const prompt = [
        `Generate tasks for story "${story.title}" (Epic: ${epic.title}).`,
        "Use the Task template: Objective; Context; Inputs; Implementation Plan; DoD; Testing & QA; Dependencies; Risks; References.",
        "Return JSON only matching:",
        TASK_SCHEMA_SNIPPET,
        "Rules:",
        "- Each task must include localId, title, description, type, estimatedStoryPoints, priorityHint.",
        "- dependsOnKeys must reference localIds in this story.",
        "- Use docdex handles when citing docs.",
        `Story context (key=${story.key ?? story.localId ?? "TBD"}):`,
        story.description ?? story.userStory ?? "",
        `Acceptance criteria: ${(story.acceptanceCriteria ?? []).join("; ")}`,
        `Docs: ${docSummary || "none"}`,
    ].join("\n\n");
    const { output } = await this.invokeAgentWithRetry(agent, prompt, "tasks", stream, jobId, commandRunId, {
        epicKey: epic.key,
        storyKey: story.key ?? story.localId,
    });
    const parsed = extractJson(output);
    if (!parsed || !Array.isArray(parsed.tasks) || parsed.tasks.length === 0) {
        throw new Error(`Agent did not return tasks for story ${story.title}`);
    }
    return parsed.tasks
        .map((task, idx) => ({
            localId: task.localId ?? `t${idx + 1}`,
            title: task.title ?? "Task",
            type: task.type,
            description: task.description,
            estimatedStoryPoints: typeof task.estimatedStoryPoints === "number" ? task.estimatedStoryPoints : undefined,
            priorityHint: typeof task.priorityHint === "number" ? task.priorityHint : undefined,
            dependsOnKeys: Array.isArray(task.dependsOnKeys) ? task.dependsOnKeys : [],
            relatedDocs: Array.isArray(task.relatedDocs) ? task.relatedDocs : [],
        }))
        .filter((t) => t.title);
}
|
|
541
|
+
async generatePlanFromAgent(epics, agent, docSummary, options) {
|
|
542
|
+
const planEpics = epics.map((epic, idx) => ({
|
|
543
|
+
...epic,
|
|
544
|
+
localId: epic.localId ?? `e${idx + 1}`,
|
|
545
|
+
}));
|
|
546
|
+
const planStories = [];
|
|
547
|
+
const planTasks = [];
|
|
548
|
+
for (const epic of planEpics) {
|
|
549
|
+
const stories = await this.generateStoriesForEpic(agent, { ...epic }, docSummary, options.agentStream, options.jobId, options.commandRunId);
|
|
550
|
+
const limitedStories = stories.slice(0, options.maxStoriesPerEpic ?? stories.length);
|
|
551
|
+
limitedStories.forEach((story, idx) => {
|
|
552
|
+
planStories.push({
|
|
553
|
+
...story,
|
|
554
|
+
localId: story.localId ?? `us${idx + 1}`,
|
|
555
|
+
epicLocalId: epic.localId,
|
|
556
|
+
});
|
|
557
|
+
});
|
|
558
|
+
}
|
|
559
|
+
for (const story of planStories) {
|
|
560
|
+
const tasks = await this.generateTasksForStory(agent, { key: story.epicLocalId, title: story.title }, story, docSummary, options.agentStream, options.jobId, options.commandRunId);
|
|
561
|
+
const limitedTasks = tasks.slice(0, options.maxTasksPerStory ?? tasks.length);
|
|
562
|
+
limitedTasks.forEach((task, idx) => {
|
|
563
|
+
planTasks.push({
|
|
564
|
+
...task,
|
|
565
|
+
localId: task.localId ?? `t${idx + 1}`,
|
|
566
|
+
storyLocalId: story.localId,
|
|
567
|
+
epicLocalId: story.epicLocalId,
|
|
568
|
+
});
|
|
569
|
+
});
|
|
570
|
+
}
|
|
571
|
+
return { epics: planEpics, stories: planStories, tasks: planTasks };
|
|
572
|
+
}
|
|
573
|
+
async writePlanArtifacts(projectKey, plan, docSummary) {
|
|
574
|
+
const baseDir = path.join(this.workspace.workspaceRoot, ".mcoda", "tasks", projectKey);
|
|
575
|
+
await fs.mkdir(baseDir, { recursive: true });
|
|
576
|
+
const write = async (file, data) => {
|
|
577
|
+
const target = path.join(baseDir, file);
|
|
578
|
+
await fs.writeFile(target, JSON.stringify(data, null, 2), "utf8");
|
|
579
|
+
};
|
|
580
|
+
await write("plan.json", { projectKey, generatedAt: new Date().toISOString(), docSummary, ...plan });
|
|
581
|
+
await write("epics.json", plan.epics);
|
|
582
|
+
await write("stories.json", plan.stories);
|
|
583
|
+
await write("tasks.json", plan.tasks);
|
|
584
|
+
return { folder: baseDir };
|
|
585
|
+
}
|
|
586
|
+
async persistPlanToDb(projectId, projectKey, plan, jobId, commandRunId, options) {
|
|
587
|
+
const resetKeys = options?.resetKeys ?? false;
|
|
588
|
+
const existingEpicKeys = resetKeys ? [] : await this.workspaceRepo.listEpicKeys(projectId);
|
|
589
|
+
const epicKeyGen = createEpicKeyGenerator(projectKey, existingEpicKeys);
|
|
590
|
+
const epicInserts = [];
|
|
591
|
+
const epicMeta = [];
|
|
592
|
+
for (const epic of plan.epics) {
|
|
593
|
+
const key = epicKeyGen(epic.area);
|
|
594
|
+
epicInserts.push({
|
|
595
|
+
projectId,
|
|
596
|
+
key,
|
|
597
|
+
title: epic.title || `Epic ${key}`,
|
|
598
|
+
description: buildEpicDescription(key, epic.title || `Epic ${key}`, epic.description, epic.acceptanceCriteria, epic.relatedDocs),
|
|
599
|
+
storyPointsTotal: null,
|
|
600
|
+
priority: epic.priorityHint ?? (epicInserts.length + 1),
|
|
601
|
+
metadata: epic.relatedDocs ? { doc_links: epic.relatedDocs } : undefined,
|
|
602
|
+
});
|
|
603
|
+
epicMeta.push({ key, node: epic });
|
|
604
|
+
}
|
|
605
|
+
let epicRows = [];
|
|
606
|
+
let storyRows = [];
|
|
607
|
+
let taskRows = [];
|
|
608
|
+
let dependencyRows = [];
|
|
609
|
+
await this.workspaceRepo.withTransaction(async () => {
|
|
610
|
+
if (options?.force) {
|
|
611
|
+
await this.workspaceRepo.deleteProjectBacklog(projectId, false);
|
|
612
|
+
}
|
|
613
|
+
epicRows = await this.workspaceRepo.insertEpics(epicInserts, false);
|
|
614
|
+
const storyInserts = [];
|
|
615
|
+
const storyMeta = [];
|
|
616
|
+
for (const epic of epicMeta) {
|
|
617
|
+
const epicRow = epicRows.find((row) => row.key === epic.key);
|
|
618
|
+
if (!epicRow)
|
|
619
|
+
continue;
|
|
620
|
+
const stories = plan.stories.filter((s) => s.epicLocalId === epic.node.localId);
|
|
621
|
+
const existingStoryKeys = await this.workspaceRepo.listStoryKeys(epicRow.id);
|
|
622
|
+
const storyKeyGen = createStoryKeyGenerator(epicRow.key, existingStoryKeys);
|
|
623
|
+
for (const story of stories) {
|
|
624
|
+
const storyKey = storyKeyGen();
|
|
625
|
+
storyInserts.push({
|
|
626
|
+
projectId,
|
|
627
|
+
epicId: epicRow.id,
|
|
628
|
+
key: storyKey,
|
|
629
|
+
title: story.title || `Story ${storyKey}`,
|
|
630
|
+
description: buildStoryDescription(storyKey, story.title || `Story ${storyKey}`, story.userStory, story.description, story.acceptanceCriteria, story.relatedDocs),
|
|
631
|
+
acceptanceCriteria: story.acceptanceCriteria?.join("\n") ?? undefined,
|
|
632
|
+
storyPointsTotal: null,
|
|
633
|
+
priority: story.priorityHint ?? (storyInserts.length + 1),
|
|
634
|
+
metadata: story.relatedDocs ? { doc_links: story.relatedDocs } : undefined,
|
|
635
|
+
});
|
|
636
|
+
storyMeta.push({ storyKey, epicKey: epicRow.key, node: story });
|
|
637
|
+
}
|
|
638
|
+
}
|
|
639
|
+
storyRows = await this.workspaceRepo.insertStories(storyInserts, false);
|
|
640
|
+
const storyIdByKey = new Map(storyRows.map((row) => [row.key, row.id]));
|
|
641
|
+
const epicIdByKey = new Map(epicRows.map((row) => [row.key, row.id]));
|
|
642
|
+
const taskDetails = [];
|
|
643
|
+
for (const story of storyMeta) {
|
|
644
|
+
const storyId = storyIdByKey.get(story.storyKey);
|
|
645
|
+
const existingTaskKeys = storyId ? await this.workspaceRepo.listTaskKeys(storyId) : [];
|
|
646
|
+
const tasks = plan.tasks.filter((t) => t.storyLocalId === story.node.localId);
|
|
647
|
+
const taskKeyGen = createTaskKeyGenerator(story.storyKey, existingTaskKeys);
|
|
648
|
+
for (const task of tasks) {
|
|
649
|
+
const key = taskKeyGen();
|
|
650
|
+
const localId = task.localId ?? key;
|
|
651
|
+
taskDetails.push({
|
|
652
|
+
localId,
|
|
653
|
+
key,
|
|
654
|
+
storyKey: story.storyKey,
|
|
655
|
+
epicKey: story.epicKey,
|
|
656
|
+
plan: task,
|
|
657
|
+
});
|
|
658
|
+
}
|
|
659
|
+
}
|
|
660
|
+
const localToKey = new Map(taskDetails.map((t) => [t.localId, t.key]));
|
|
661
|
+
const taskInserts = [];
|
|
662
|
+
for (const task of taskDetails) {
|
|
663
|
+
const storyId = storyIdByKey.get(task.storyKey);
|
|
664
|
+
const epicId = epicIdByKey.get(task.epicKey);
|
|
665
|
+
if (!storyId || !epicId)
|
|
666
|
+
continue;
|
|
667
|
+
const depSlugs = (task.plan.dependsOnKeys ?? [])
|
|
668
|
+
.map((dep) => localToKey.get(dep))
|
|
669
|
+
.filter((value) => Boolean(value));
|
|
670
|
+
taskInserts.push({
|
|
671
|
+
projectId,
|
|
672
|
+
epicId,
|
|
673
|
+
userStoryId: storyId,
|
|
674
|
+
key: task.key,
|
|
675
|
+
title: task.plan.title ?? `Task ${task.key}`,
|
|
676
|
+
description: buildTaskDescription(task.key, task.plan.title ?? `Task ${task.key}`, task.plan.description, task.storyKey, task.epicKey, task.plan.relatedDocs, depSlugs),
|
|
677
|
+
type: task.plan.type ?? "feature",
|
|
678
|
+
status: "not_started",
|
|
679
|
+
storyPoints: task.plan.estimatedStoryPoints ?? null,
|
|
680
|
+
priority: task.plan.priorityHint ?? (taskInserts.length + 1),
|
|
681
|
+
metadata: task.plan.relatedDocs ? { doc_links: task.plan.relatedDocs } : undefined,
|
|
682
|
+
});
|
|
683
|
+
}
|
|
684
|
+
taskRows = await this.workspaceRepo.insertTasks(taskInserts, false);
|
|
685
|
+
const taskByLocal = new Map();
|
|
686
|
+
for (const detail of taskDetails) {
|
|
687
|
+
const row = taskRows.find((t) => t.key === detail.key);
|
|
688
|
+
if (row) {
|
|
689
|
+
taskByLocal.set(detail.localId, row);
|
|
690
|
+
}
|
|
691
|
+
}
|
|
692
|
+
const depKeys = new Set();
|
|
693
|
+
const dependencies = [];
|
|
694
|
+
for (const detail of taskDetails) {
|
|
695
|
+
const current = taskByLocal.get(detail.localId);
|
|
696
|
+
if (!current)
|
|
697
|
+
continue;
|
|
698
|
+
for (const dep of detail.plan.dependsOnKeys ?? []) {
|
|
699
|
+
const target = taskByLocal.get(dep);
|
|
700
|
+
if (!target || target.id === current.id)
|
|
701
|
+
continue;
|
|
702
|
+
const depKey = `${current.id}|${target.id}|blocks`;
|
|
703
|
+
if (depKeys.has(depKey))
|
|
704
|
+
continue;
|
|
705
|
+
depKeys.add(depKey);
|
|
706
|
+
dependencies.push({
|
|
707
|
+
taskId: current.id,
|
|
708
|
+
dependsOnTaskId: target.id,
|
|
709
|
+
relationType: "blocks",
|
|
710
|
+
});
|
|
711
|
+
}
|
|
712
|
+
}
|
|
713
|
+
if (dependencies.length > 0) {
|
|
714
|
+
dependencyRows = await this.workspaceRepo.insertTaskDependencies(dependencies, false);
|
|
715
|
+
}
|
|
716
|
+
// Roll up story and epic story point totals.
|
|
717
|
+
const storySpTotals = new Map();
|
|
718
|
+
for (const task of taskRows) {
|
|
719
|
+
if (typeof task.storyPoints === "number") {
|
|
720
|
+
storySpTotals.set(task.userStoryId, (storySpTotals.get(task.userStoryId) ?? 0) + task.storyPoints);
|
|
721
|
+
}
|
|
722
|
+
}
|
|
723
|
+
for (const [storyId, total] of storySpTotals.entries()) {
|
|
724
|
+
await this.workspaceRepo.updateStoryPointsTotal(storyId, total);
|
|
725
|
+
}
|
|
726
|
+
const epicSpTotals = new Map();
|
|
727
|
+
for (const story of storyRows) {
|
|
728
|
+
if (typeof story.storyPointsTotal === "number") {
|
|
729
|
+
epicSpTotals.set(story.epicId, (epicSpTotals.get(story.epicId) ?? 0) + (story.storyPointsTotal ?? 0));
|
|
730
|
+
}
|
|
731
|
+
}
|
|
732
|
+
for (const [epicId, total] of epicSpTotals.entries()) {
|
|
733
|
+
await this.workspaceRepo.updateEpicStoryPointsTotal(epicId, total);
|
|
734
|
+
}
|
|
735
|
+
const now = new Date().toISOString();
|
|
736
|
+
for (const task of taskRows) {
|
|
737
|
+
await this.workspaceRepo.createTaskRun({
|
|
738
|
+
taskId: task.id,
|
|
739
|
+
command: "create-tasks",
|
|
740
|
+
status: "succeeded",
|
|
741
|
+
jobId,
|
|
742
|
+
commandRunId,
|
|
743
|
+
startedAt: now,
|
|
744
|
+
finishedAt: now,
|
|
745
|
+
runContext: { key: task.key },
|
|
746
|
+
});
|
|
747
|
+
}
|
|
748
|
+
});
|
|
749
|
+
return { epics: epicRows, stories: storyRows, tasks: taskRows, dependencies: dependencyRows };
|
|
750
|
+
}
|
|
751
|
+
/**
 * End-to-end "create-tasks" flow: start a command run + job, index docs,
 * generate epics via the agent, expand them into stories/tasks, write plan
 * artifacts to disk, persist the plan to the DB, and mark the job complete.
 *
 * Retries the whole pipeline up to MAX_BUSY_RETRIES times when the failure
 * looks like SQLite contention (SQLITE_BUSY / "database is locked"); any
 * other failure marks the job and command run failed and rethrows.
 *
 * @param options { projectKey, inputs, agentName, agentStream?, force?,
 *                  maxEpics?, maxStoriesPerEpic?, maxTasksPerStory? }
 * @returns { jobId, commandRunId, epics, stories, tasks, dependencies }
 * @throws the last error after retries are exhausted or on non-busy failure
 */
async createTasks(options) {
    // Streaming defaults to on; only an explicit `false` disables it.
    const agentStream = options.agentStream !== false;
    const commandRun = await this.jobService.startCommandRun("create-tasks", options.projectKey);
    const job = await this.jobService.startJob("create_tasks", commandRun.id, options.projectKey, {
        commandName: "create-tasks",
        payload: {
            projectKey: options.projectKey,
            inputs: options.inputs,
            agent: options.agentName,
            agentStream,
        },
    });
    let lastError;
    for (let attempt = 1; attempt <= CreateTasksService.MAX_BUSY_RETRIES; attempt++) {
        try {
            const project = await this.workspaceRepo.createProjectIfMissing({
                key: options.projectKey,
                name: options.projectKey,
                description: `Workspace project ${options.projectKey}`,
            });
            // Index input docs and build the agent prompt context from them.
            const docs = await this.prepareDocs(options.inputs);
            const { docSummary, warnings: docWarnings } = this.buildDocContext(docs);
            const { prompt } = this.buildPrompt(options.projectKey, docs, options);
            await this.jobService.writeCheckpoint(job.id, {
                stage: "docs_indexed",
                timestamp: new Date().toISOString(),
                details: { count: docs.length, warnings: docWarnings },
            });
            const agent = await this.resolveAgent(options.agentName);
            // First agent call produces the epic list; cap it at maxEpics.
            const { output: epicOutput } = await this.invokeAgentWithRetry(agent, prompt, "epics", agentStream, job.id, commandRun.id, { docWarnings });
            const epics = this.parseEpics(epicOutput, docs, options.projectKey).slice(0, options.maxEpics ?? Number.MAX_SAFE_INTEGER);
            await this.jobService.writeCheckpoint(job.id, {
                stage: "epics_generated",
                timestamp: new Date().toISOString(),
                details: { epics: epics.length },
            });
            // Expand epics into stories and tasks via further agent calls.
            const plan = await this.generatePlanFromAgent(epics, agent, docSummary, {
                agentStream,
                jobId: job.id,
                commandRunId: commandRun.id,
                maxStoriesPerEpic: options.maxStoriesPerEpic,
                maxTasksPerStory: options.maxTasksPerStory,
            });
            await this.jobService.writeCheckpoint(job.id, {
                stage: "stories_generated",
                timestamp: new Date().toISOString(),
                details: { stories: plan.stories.length },
            });
            await this.jobService.writeCheckpoint(job.id, {
                stage: "tasks_generated",
                timestamp: new Date().toISOString(),
                details: { tasks: plan.tasks.length },
            });
            // Write JSON plan artifacts to disk before touching the DB.
            const { folder } = await this.writePlanArtifacts(options.projectKey, plan, docSummary);
            await this.jobService.writeCheckpoint(job.id, {
                stage: "plan_written",
                timestamp: new Date().toISOString(),
                details: { folder },
            });
            // `force` both wipes the existing backlog and resets key numbering.
            const { epics: epicRows, stories: storyRows, tasks: taskRows, dependencies: dependencyRows } = await this.persistPlanToDb(project.id, options.projectKey, plan, job.id, commandRun.id, {
                force: options.force,
                resetKeys: options.force,
            });
            await this.jobService.updateJobStatus(job.id, "completed", {
                payload: {
                    epicsCreated: epicRows.length,
                    storiesCreated: storyRows.length,
                    tasksCreated: taskRows.length,
                    dependenciesCreated: dependencyRows.length,
                    docs: docSummary,
                    planFolder: folder,
                },
            });
            await this.jobService.finishCommandRun(commandRun.id, "succeeded");
            return {
                jobId: job.id,
                commandRunId: commandRun.id,
                epics: epicRows,
                stories: storyRows,
                tasks: taskRows,
                dependencies: dependencyRows,
            };
        }
        catch (error) {
            lastError = error;
            const message = error.message;
            // Heuristic: treat SQLite contention errors as retryable.
            const isBusy = message?.includes("SQLITE_BUSY") ||
                message?.includes("database is locked") ||
                message?.includes("busy");
            const remaining = CreateTasksService.MAX_BUSY_RETRIES - attempt;
            if (isBusy && remaining > 0) {
                // Linear backoff: base delay scaled by the attempt number.
                const backoff = CreateTasksService.BUSY_BACKOFF_MS * attempt;
                await this.jobService.appendLog(job.id, `Encountered SQLITE_BUSY, retrying create-tasks (attempt ${attempt}/${CreateTasksService.MAX_BUSY_RETRIES}) after ${backoff}ms...\n`);
                await delay(backoff);
                continue;
            }
            // Non-retryable (or retries exhausted): record failure and rethrow.
            await this.jobService.updateJobStatus(job.id, "failed", { errorSummary: message });
            await this.jobService.finishCommandRun(commandRun.id, "failed", message);
            throw error;
        }
    }
    // Defensive: only reachable if the loop exits without returning/throwing.
    await this.jobService.updateJobStatus(job.id, "failed", { errorSummary: lastError?.message });
    await this.jobService.finishCommandRun(commandRun.id, "failed", lastError?.message);
    throw lastError ?? new Error("create-tasks failed");
}
|
|
856
|
+
/**
 * Seed the workspace DB from plan files previously written to disk.
 *
 * Loads epics/stories/tasks from `<planDir>/epics.json` etc., falling back
 * to the corresponding sections of `plan.json`; persists them via
 * persistPlanToDb; then optionally applies refinement plans (from
 * refinePlanPath / refinePlanPaths / refinePlansDir) using a dynamically
 * imported RefineTasksService. Job and command-run status are recorded
 * throughout; any failure marks both as failed and rethrows.
 *
 * @param options { projectKey, planDir?, force?, refinePlanPath?,
 *                  refinePlanPaths?, refinePlansDir? }
 * @returns { jobId, commandRunId, epics, stories, tasks, dependencies }
 * @throws Error when no epics/stories/tasks can be loaded from the folder
 */
async migratePlanFromFolder(options) {
    const projectKey = options.projectKey;
    const commandRun = await this.jobService.startCommandRun("migrate-tasks", projectKey);
    const job = await this.jobService.startJob("migrate_tasks", commandRun.id, projectKey, {
        commandName: "migrate-tasks",
        payload: { projectKey, planDir: options.planDir },
    });
    // Default plan location mirrors where writePlanArtifacts writes.
    const planDir = options.planDir ?? path.join(this.workspace.workspaceRoot, ".mcoda", "tasks", projectKey);
    try {
        const planPath = path.join(planDir, "plan.json");
        // Best-effort JSON loader: missing or malformed files yield undefined.
        const loadJson = async (file) => {
            try {
                const raw = await fs.readFile(file, "utf8");
                return JSON.parse(raw);
            }
            catch {
                return undefined;
            }
        };
        const planFromPlan = await loadJson(planPath);
        const epicsFromFile = await loadJson(path.join(planDir, "epics.json"));
        const storiesFromFile = await loadJson(path.join(planDir, "stories.json"));
        const tasksFromFile = await loadJson(path.join(planDir, "tasks.json"));
        // Per-section files take precedence over the combined plan.json.
        const epics = epicsFromFile ?? planFromPlan?.epics;
        const stories = storiesFromFile ?? planFromPlan?.stories;
        const tasks = tasksFromFile ?? planFromPlan?.tasks;
        const docSummary = planFromPlan?.docSummary;
        if (!epics || !stories || !tasks) {
            throw new Error(`Plan files missing required sections. Expected epics/stories/tasks in ${planDir} (plan.json or separate files).`);
        }
        const project = await this.workspaceRepo.createProjectIfMissing({
            key: projectKey,
            name: projectKey,
            description: `Workspace project ${projectKey}`,
        });
        const plan = {
            epics: epics,
            stories: stories,
            tasks: tasks,
        };
        // Collect refinement plan candidates from explicit paths and/or a
        // directory scan (subdirectories contribute their plan.json; loose
        // .json files are taken as-is), dedupe, and keep only existing files.
        const loadRefinePlans = async () => {
            const candidates = [];
            if (options.refinePlanPath)
                candidates.push(options.refinePlanPath);
            if (options.refinePlanPaths && options.refinePlanPaths.length)
                candidates.push(...options.refinePlanPaths);
            if (options.refinePlansDir) {
                const dir = path.resolve(options.refinePlansDir);
                const entries = await fs.readdir(dir, { withFileTypes: true });
                for (const entry of entries) {
                    if (entry.isDirectory()) {
                        candidates.push(path.join(dir, entry.name, "plan.json"));
                    }
                    else if (entry.isFile() && entry.name.toLowerCase().endsWith(".json")) {
                        candidates.push(path.join(dir, entry.name));
                    }
                }
            }
            const uniq = Array.from(new Set(candidates.map((p) => path.resolve(p))));
            const existing = [];
            for (const file of uniq) {
                try {
                    await fs.access(file);
                    existing.push(file);
                }
                catch {
                    // ignore missing file candidates (e.g., directory entries without plan.json)
                }
            }
            // Deterministic application order.
            return existing.sort((a, b) => a.localeCompare(b));
        };
        const refinePlanPaths = await loadRefinePlans();
        // If refinement plans are provided, default to wiping existing backlog to avoid mixing old tasks.
        const forceBacklogReset = refinePlanPaths.length ? true : !!options.force;
        await this.jobService.writeCheckpoint(job.id, {
            stage: "plan_loaded",
            timestamp: new Date().toISOString(),
            details: { planDir, epics: plan.epics.length, stories: plan.stories.length, tasks: plan.tasks.length },
        });
        const { epics: epicRows, stories: storyRows, tasks: taskRows, dependencies: dependencyRows } = await this.persistPlanToDb(project.id, projectKey, plan, job.id, commandRun.id, {
            force: forceBacklogReset,
            resetKeys: forceBacklogReset,
        });
        await this.jobService.updateJobStatus(job.id, "completed", {
            payload: {
                epicsCreated: epicRows.length,
                storiesCreated: storyRows.length,
                tasksCreated: taskRows.length,
                dependenciesCreated: dependencyRows.length,
                docs: docSummary,
                planFolder: planDir,
            },
        });
        await this.jobService.finishCommandRun(commandRun.id, "succeeded");
        // Optionally apply a refinement plan from disk after seeding the backlog.
        if (refinePlanPaths.length > 0) {
            // Dynamic import avoids a static circular dependency with RefineTasksService.
            const { RefineTasksService } = await import("./RefineTasksService.js");
            const refineService = await RefineTasksService.create(this.workspace);
            try {
                for (const refinePlanPath of refinePlanPaths) {
                    await refineService.refineTasks({
                        workspace: this.workspace,
                        projectKey,
                        planInPath: path.resolve(refinePlanPath),
                        fromDb: true,
                        apply: true,
                        agentStream: false,
                        dryRun: false,
                    });
                }
            }
            finally {
                // Always release the refine service, even if a refinement fails.
                await refineService.close();
            }
        }
        return {
            jobId: job.id,
            commandRunId: commandRun.id,
            epics: epicRows,
            stories: storyRows,
            tasks: taskRows,
            dependencies: dependencyRows,
        };
    }
    catch (error) {
        const message = error.message;
        await this.jobService.updateJobStatus(job.id, "failed", { errorSummary: message });
        await this.jobService.finishCommandRun(commandRun.id, "failed", message);
        throw error;
    }
}
|
|
987
|
+
}
|
|
988
|
+
// Maximum attempts of the full createTasks pipeline when SQLite reports
// contention (SQLITE_BUSY / "database is locked").
CreateTasksService.MAX_BUSY_RETRIES = 6;
// Base retry backoff in milliseconds; multiplied by the attempt number.
CreateTasksService.BUSY_BACKOFF_MS = 500;
|