@femtomc/mu-orchestrator 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +54 -0
- package/dist/dag_runner.d.ts +20 -0
- package/dist/dag_runner.d.ts.map +1 -0
- package/dist/dag_runner.js +465 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +7 -0
- package/dist/pi_backend.d.ts +21 -0
- package/dist/pi_backend.d.ts.map +1 -0
- package/dist/pi_backend.js +87 -0
- package/dist/prompt.d.ts +17 -0
- package/dist/prompt.d.ts.map +1 -0
- package/dist/prompt.js +153 -0
- package/package.json +19 -0
package/README.md
ADDED
@@ -0,0 +1,54 @@
+# @femtomc/mu-orchestrator
+
+Node DAG runner that drives `@femtomc/mu-issue` and `@femtomc/mu-forum` to execute ready leaf issues and log outcomes.
+
+## Install
+
+After publishing:
+
+```bash
+npm install @femtomc/mu-orchestrator
+# or: bun add @femtomc/mu-orchestrator
+```
+
+From this repo:
+
+```bash
+cd mu
+bun install
+bun run build
+```
+
+## Usage
+
+```ts
+import { FsJsonlStore, fsEventLog, getStorePaths } from "@femtomc/mu-core/node";
+import { ForumStore } from "@femtomc/mu-forum";
+import { IssueStore } from "@femtomc/mu-issue";
+import { DagRunner } from "@femtomc/mu-orchestrator";
+
+const repoRoot = process.cwd();
+const paths = getStorePaths(repoRoot);
+const events = fsEventLog(paths.eventsPath);
+
+const issues = new IssueStore(new FsJsonlStore(paths.issuesPath), { events });
+const forum = new ForumStore(new FsJsonlStore(paths.forumPath), { events });
+
+const runner = new DagRunner(issues, forum, repoRoot);
+const result = await runner.run("mu-<root-id>");
+console.log(result);
+```
+
+## Tests / Typecheck
+
+From the `mu/` repo root:
+
+```bash
+bun test packages/orchestrator
+bun run typecheck
+```
+
+## Runtime
+
+- **Node-only** (uses `node:child_process` + filesystem).
+- Default backend is the `pi` CLI (`pi --mode json ...`). If you don't have `pi`, pass a custom `BackendRunner`.
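Editor's note: the README's last bullet points at swapping in a custom `BackendRunner`. A minimal sketch of what that could look like, assuming only the `BackendRunner`/`BackendRunOpts` contract published in `dist/pi_backend.d.ts` later in this diff; the `my-agent` command is a placeholder, not part of the package.

```ts
import { spawn } from "node:child_process";
import type { BackendRunner, BackendRunOpts } from "@femtomc/mu-orchestrator";

// Hypothetical backend: shells out to a placeholder `my-agent` CLI instead of `pi`.
// The only contract is run(opts) -> Promise<exit code>.
class MyAgentBackend implements BackendRunner {
    run(opts: BackendRunOpts): Promise<number> {
        return new Promise<number>((resolve, reject) => {
            const proc = spawn("my-agent", ["--model", opts.model, opts.prompt], {
                cwd: opts.cwd,
                stdio: "inherit",
            });
            proc.once("error", reject);
            proc.once("close", (code) => resolve(code ?? 0));
        });
    }
}

// Wire it in via the constructor options:
// const runner = new DagRunner(issues, forum, repoRoot, { backend: new MyAgentBackend() });
```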
package/dist/dag_runner.d.ts
ADDED
@@ -0,0 +1,20 @@
+import { type EventLog } from "@femtomc/mu-core/node";
+import type { ForumStore } from "@femtomc/mu-forum";
+import type { IssueStore } from "@femtomc/mu-issue";
+import type { BackendRunner } from "./pi_backend.js";
+export type DagResult = {
+    status: "root_final" | "no_executable_leaf" | "max_steps_exhausted" | "error";
+    steps: number;
+    error: string;
+};
+export declare class DagRunner {
+    #private;
+    constructor(store: IssueStore, forum: ForumStore, repoRoot: string, opts?: {
+        backend?: BackendRunner;
+        events?: EventLog;
+    });
+    run(rootId: string, maxSteps?: number, opts?: {
+        review?: boolean;
+    }): Promise<DagResult>;
+}
+//# sourceMappingURL=dag_runner.d.ts.map
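Editor's note: given the `run` signature above, here is a hedged sketch of consuming a `DagResult`; per the compiled runner later in this diff, `maxSteps` defaults to 20 and `review` to true, and `"mu-<root-id>"` is the same placeholder the README uses.

```ts
import { DagRunner, type DagResult } from "@femtomc/mu-orchestrator";

declare const runner: DagRunner; // constructed as in the README's Usage section

// Cap the run at 50 steps and skip the reviewer pass.
const result: DagResult = await runner.run("mu-<root-id>", 50, { review: false });

switch (result.status) {
    case "root_final":
        console.log(`root closed after ${result.steps} steps`);
        break;
    case "error":
        console.error(`runner error: ${result.error}`);
        break;
    default:
        console.log(`stopped with status=${result.status} after ${result.steps} steps`);
}
```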
package/dist/dag_runner.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"dag_runner.d.ts","sourceRoot":"","sources":["../src/dag_runner.ts"],"names":[],"mappings":"AAIA,OAAO,EAEN,KAAK,QAAQ,EAMb,MAAM,uBAAuB,CAAC;AAC/B,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAIrD,MAAM,MAAM,SAAS,GAAG;IACvB,MAAM,EAAE,YAAY,GAAG,oBAAoB,GAAG,qBAAqB,GAAG,OAAO,CAAC;IAC9E,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;CACd,CAAC;AAuBF,qBAAa,SAAS;;gBAepB,KAAK,EAAE,UAAU,EACjB,KAAK,EAAE,UAAU,EACjB,QAAQ,EAAE,MAAM,EAChB,IAAI,GAAE;QAAE,OAAO,CAAC,EAAE,aAAa,CAAC;QAAC,MAAM,CAAC,EAAE,QAAQ,CAAA;KAAO;IAiVpD,GAAG,CAAC,MAAM,EAAE,MAAM,EAAE,QAAQ,GAAE,MAAW,EAAE,IAAI,GAAE;QAAE,MAAM,CAAC,EAAE,OAAO,CAAA;KAAO,GAAG,OAAO,CAAC,SAAS,CAAC;CAkLrG"}
package/dist/dag_runner.js
ADDED
@@ -0,0 +1,465 @@
+import { existsSync } from "node:fs";
+import { mkdir } from "node:fs/promises";
+import { join, relative } from "node:path";
+import { currentRunId, executionSpecFromDict, fsEventLogFromRepoRoot, getStorePaths, newRunId, runContext, } from "@femtomc/mu-core/node";
+import { PiCliBackend } from "./pi_backend.js";
+import { readPromptMeta, renderPromptTemplate } from "./prompt.js";
+function roundTo(n, digits) {
+    const f = 10 ** digits;
+    return Math.round(n * f) / f;
+}
+function relPath(repoRoot, path) {
+    try {
+        const rel = relative(repoRoot, path);
+        return rel || path;
+    }
+    catch {
+        return path;
+    }
+}
+export class DagRunner {
+    // Hardcoded fallbacks if neither execution_spec nor orchestrator.md provide config.
+    #fallbackCli = "pi";
+    #fallbackModel = "gpt-5.3-codex";
+    #fallbackReasoning = "xhigh";
+    #store;
+    #forum;
+    #repoRoot;
+    #events;
+    #backend;
+    #reorchestrateOutcomes = new Set(["failure", "needs_work"]);
+    constructor(store, forum, repoRoot, opts = {}) {
+        this.#store = store;
+        this.#forum = forum;
+        this.#repoRoot = repoRoot;
+        this.#events = opts.events ?? fsEventLogFromRepoRoot(repoRoot);
+        this.#backend = opts.backend ?? new PiCliBackend();
+    }
+    async #resolveConfig(issue) {
+        let cli = this.#fallbackCli;
+        let model = this.#fallbackModel;
+        let reasoning = this.#fallbackReasoning;
+        let promptPath = null;
+        // Tier 1: orchestrator.md frontmatter (global defaults).
+        const { orchestratorPath } = getStorePaths(this.#repoRoot);
+        if (existsSync(orchestratorPath)) {
+            const meta = await readPromptMeta(orchestratorPath);
+            if (typeof meta.cli === "string")
+                cli = meta.cli;
+            if (typeof meta.model === "string")
+                model = meta.model;
+            if (typeof meta.reasoning === "string")
+                reasoning = meta.reasoning;
+            promptPath = orchestratorPath;
+        }
+        // Parse execution spec (may set role + explicit fields).
+        const specDict = issue.execution_spec ?? null;
+        const spec = specDict ? executionSpecFromDict(specDict, this.#repoRoot) : null;
+        // Tier 2: role file frontmatter (role-specific defaults).
+        if (spec?.role) {
+            const rolePath = join(this.#repoRoot, ".mu", "roles", `${spec.role}.md`);
+            if (existsSync(rolePath)) {
+                const roleMeta = await readPromptMeta(rolePath);
+                if (typeof roleMeta.cli === "string")
+                    cli = roleMeta.cli;
+                if (typeof roleMeta.model === "string")
+                    model = roleMeta.model;
+                if (typeof roleMeta.reasoning === "string")
+                    reasoning = roleMeta.reasoning;
+            }
+        }
+        // Tier 3: execution_spec explicit fields (highest priority).
+        if (spec) {
+            if (spec.cli != null)
+                cli = spec.cli;
+            if (spec.model != null)
+                model = spec.model;
+            if (spec.reasoning != null)
+                reasoning = spec.reasoning;
+            if (spec.prompt_path != null)
+                promptPath = spec.prompt_path;
+        }
+        return { cli, model, reasoning, promptPath };
+    }
+    async #renderPrompt(issue, promptPath, rootId) {
+        let rendered;
+        if (promptPath && existsSync(promptPath)) {
+            rendered = await renderPromptTemplate(promptPath, issue, { repoRoot: this.#repoRoot });
+        }
+        else {
+            rendered = issue.title;
+            if (issue.body) {
+                rendered += `\n\n${issue.body}`;
+            }
+        }
+        rendered += `\n\n## Inshallah Context\nRoot: ${rootId}\nAssigned issue: ${issue.id}\n`;
+        return rendered;
+    }
+    async #executeBackend(issue, cfg, rootId, opts = {}) {
+        const logSuffix = opts.logSuffix ?? "";
+        const rendered = await this.#renderPrompt(issue, cfg.promptPath, rootId);
+        const { logsDir } = getStorePaths(this.#repoRoot);
+        await mkdir(logsDir, { recursive: true });
+        const suffix = logSuffix ? `.${logSuffix}` : "";
+        const teePath = join(logsDir, `${issue.id}${suffix}.jsonl`);
+        await this.#events.emit("backend.run.start", {
+            source: "backend",
+            issueId: issue.id,
+            payload: {
+                cli: cfg.cli,
+                model: cfg.model,
+                reasoning: cfg.reasoning,
+                prompt_path: cfg.promptPath,
+                tee_path: relPath(this.#repoRoot, teePath),
+                log_suffix: logSuffix,
+            },
+        });
+        const t0 = Date.now();
+        const exitCode = await this.#backend.run({
+            issueId: issue.id,
+            prompt: rendered,
+            model: cfg.model,
+            thinking: cfg.reasoning,
+            cwd: this.#repoRoot,
+            cli: cfg.cli,
+            promptPath: cfg.promptPath,
+            logSuffix,
+            teePath,
+        });
+        const elapsedS = (Date.now() - t0) / 1000;
+        await this.#events.emit("backend.run.end", {
+            source: "backend",
+            issueId: issue.id,
+            payload: {
+                cli: cfg.cli,
+                exit_code: exitCode,
+                elapsed_s: roundTo(elapsedS, 3),
+                tee_path: relPath(this.#repoRoot, teePath),
+                log_suffix: logSuffix,
+            },
+        });
+        return { exitCode, elapsedS };
+    }
+    #hasReviewer() {
+        return existsSync(join(this.#repoRoot, ".mu", "roles", "reviewer.md"));
+    }
+    async #maybeReview(issue, rootId, step) {
+        const issueId = issue.id;
+        // Guards.
+        if (issue.outcome !== "success") {
+            return issue;
+        }
+        if (!this.#hasReviewer()) {
+            return issue;
+        }
+        await this.#events.emit("dag.review.start", {
+            source: "dag_runner",
+            issueId,
+            payload: { root_id: rootId, step },
+        });
+        const reviewIssue = { ...issue, execution_spec: { role: "reviewer" } };
+        const cfg = await this.#resolveConfig(reviewIssue);
+        const { exitCode, elapsedS } = await this.#executeBackend(reviewIssue, cfg, rootId, { logSuffix: "review" });
+        await this.#forum.post(`issue:${issueId}`, JSON.stringify({
+            step,
+            issue_id: issueId,
+            title: issue.title,
+            exit_code: exitCode,
+            elapsed_s: roundTo(elapsedS, 1),
+            type: "review",
+        }), "reviewer");
+        const updated = (await this.#store.get(issueId)) ?? issue;
+        await this.#events.emit("dag.review.end", {
+            source: "dag_runner",
+            issueId,
+            payload: { root_id: rootId, step, outcome: updated.outcome },
+        });
+        return updated;
+    }
+    async #reopenForOrchestration(issueId, opts) {
+        const reopened = await this.#store.update(issueId, { status: "open", outcome: null, execution_spec: null });
+        await this.#events.emit("dag.unstick.reopen", {
+            source: "dag_runner",
+            issueId,
+            payload: { reason: opts.reason, step: opts.step },
+        });
+        await this.#forum.post(`issue:${issueId}`, JSON.stringify({
+            step: opts.step,
+            issue_id: issueId,
+            title: reopened.title ?? "",
+            type: "reorchestrate",
+            reason: opts.reason,
+        }), "orchestrator");
+    }
+    async #maybeUnstick(rootId, step) {
+        const idsInScope = new Set(await this.#store.subtree_ids(rootId));
+        const rows = await this.#store.list();
+        // Build children mapping once.
+        const childrenOf = new Map();
+        for (const row of rows) {
+            for (const dep of row.deps ?? []) {
+                if (dep.type !== "parent")
+                    continue;
+                const list = childrenOf.get(dep.target) ?? [];
+                list.push(row);
+                childrenOf.set(dep.target, list);
+            }
+        }
+        const hasOpenChildren = (issueId) => (childrenOf.get(issueId) ?? []).some((child) => child.status !== "closed");
+        const candidates = [];
+        for (const row of rows) {
+            if (!idsInScope.has(row.id))
+                continue;
+            if (row.status !== "closed")
+                continue;
+            const outcome = row.outcome;
+            if (outcome && this.#reorchestrateOutcomes.has(outcome)) {
+                if (hasOpenChildren(row.id))
+                    continue;
+                candidates.push(row);
+                continue;
+            }
+            if (outcome === "expanded" && (childrenOf.get(row.id)?.length ?? 0) === 0) {
+                candidates.push(row);
+            }
+        }
+        if (candidates.length === 0) {
+            return false;
+        }
+        candidates.sort((a, b) => (a.priority ?? 3) - (b.priority ?? 3));
+        const target = candidates[0];
+        await this.#reopenForOrchestration(target.id, { reason: `was outcome=${target.outcome}`, step });
+        return true;
+    }
+    async #collapseReview(issue, rootId, step) {
+        const issueId = issue.id;
+        await this.#events.emit("dag.collapse_review.start", {
+            source: "dag_runner",
+            issueId,
+            payload: { root_id: rootId, step },
+        });
+        const kids = await this.#store.children(issueId);
+        const lines = [];
+        for (const kid of kids) {
+            lines.push(`- [${kid.outcome ?? "?"}] ${kid.id}: ${kid.title}`);
+        }
+        const childrenSummary = lines.join("\n");
+        const originalBody = issue.body || "";
+        const collapsePrompt = `# Collapse Review\n\n` +
+            `## Original Specification\n\n` +
+            `**${issue.title}**\n\n` +
+            `${originalBody}\n\n` +
+            `## Children Outcomes\n\n` +
+            `${childrenSummary}\n\n` +
+            `## Instructions\n\n` +
+            `All children of this issue have completed. Review whether their aggregate work satisfies the original specification above.\n\n` +
+            `If satisfied: no action needed (the issue will be marked successful).\n\n` +
+            `If NOT satisfied: mark the parent as needing work by running:\n\n` +
+            ` \`mu issues update ${issueId} --outcome needs_work\`\n\n` +
+            `Then explain the gaps in the forum topic (issue:${issueId}).\n\n` +
+            `Do NOT create child issues yourself; the orchestrator will re-expand the issue into remediation children.\n`;
+        const reviewIssue = {
+            ...issue,
+            title: `Collapse review: ${issue.title}`,
+            body: collapsePrompt,
+            execution_spec: { role: "reviewer" },
+        };
+        const cfg = await this.#resolveConfig(reviewIssue);
+        const { exitCode, elapsedS } = await this.#executeBackend(reviewIssue, { ...cfg, promptPath: null }, rootId, {
+            logSuffix: "collapse-review",
+        });
+        await this.#forum.post(`issue:${issueId}`, JSON.stringify({
+            step,
+            issue_id: issueId,
+            title: issue.title,
+            exit_code: exitCode,
+            elapsed_s: roundTo(elapsedS, 1),
+            type: "collapse-review",
+        }), "reviewer");
+        const newKids = await this.#store.children(issueId);
+        const openKids = newKids.filter((k) => k.status !== "closed");
+        const updated = (await this.#store.get(issueId)) ?? issue;
+        if (updated.status !== "closed") {
+            await this.#events.emit("dag.collapse_review.end", {
+                source: "dag_runner",
+                issueId,
+                payload: { root_id: rootId, step, status: updated.status, outcome: updated.outcome },
+            });
+            return;
+        }
+        if (updated.outcome && this.#reorchestrateOutcomes.has(updated.outcome)) {
+            await this.#events.emit("dag.collapse_review.end", {
+                source: "dag_runner",
+                issueId,
+                payload: { root_id: rootId, step, status: updated.status, outcome: updated.outcome },
+            });
+            return;
+        }
+        if (openKids.length > 0) {
+            await this.#events.emit("dag.collapse_review.end", {
+                source: "dag_runner",
+                issueId,
+                payload: {
+                    root_id: rootId,
+                    step,
+                    status: updated.status,
+                    outcome: updated.outcome,
+                    open_kids: openKids.length,
+                },
+            });
+            return;
+        }
+        await this.#store.update(issueId, { outcome: "success" });
+        await this.#events.emit("dag.collapse_review.end", {
+            source: "dag_runner",
+            issueId,
+            payload: { root_id: rootId, step, outcome: "success" },
+        });
+    }
+    async run(rootId, maxSteps = 20, opts = {}) {
+        const review = opts.review ?? true;
+        const runId = currentRunId() ?? newRunId();
+        return await runContext({ runId }, async () => {
+            await this.#events.emit("dag.run.start", {
+                source: "dag_runner",
+                issueId: rootId,
+                payload: { root_id: rootId, max_steps: maxSteps, review },
+            });
+            let final = null;
+            try {
+                for (let i = 0; i < maxSteps; i++) {
+                    const step = i + 1;
+                    // 0. Unstick: failures / needs_work trigger re-orchestration.
+                    await this.#maybeUnstick(rootId, step);
+                    // 1. Collapse review (before termination check).
+                    if (review && this.#hasReviewer()) {
+                        const collapsible = await this.#store.collapsible(rootId);
+                        if (collapsible.length > 0) {
+                            await this.#collapseReview(collapsible[0], rootId, step);
+                            continue;
+                        }
+                    }
+                    // 2. Check termination.
+                    const v = await this.#store.validate(rootId);
+                    if (v.is_final) {
+                        final = { status: "root_final", steps: i, error: "" };
+                        return final;
+                    }
+                    // 3. Select next ready leaf.
+                    const candidates = await this.#store.ready(rootId, { tags: ["node:agent"] });
+                    if (candidates.length === 0) {
+                        // Repair pass on the root to resolve deadlocks / bad expansions.
+                        await this.#events.emit("dag.unstick.start", {
+                            source: "dag_runner",
+                            issueId: rootId,
+                            payload: { root_id: rootId, step },
+                        });
+                        const rootIssue = await this.#store.get(rootId);
+                        if (!rootIssue) {
+                            final = { status: "error", steps: i, error: "root vanished" };
+                            return final;
+                        }
+                        const idsInScope = new Set(await this.#store.subtree_ids(rootId));
+                        const openIssues = (await this.#store.list({ status: "open" })).filter((r) => idsInScope.has(r.id));
+                        const diag = `- open_issues: ${openIssues.length}\n` +
+                            `- action: diagnose deadlocks or missing expansions and create executable leaf work\n` +
+                            `- hint: run \`mu issues ready --root ${rootId}\` and \`mu issues list --root ${rootId}\`\n`;
+                        const repairIssue = {
+                            ...rootIssue,
+                            title: `Repair stuck DAG: ${rootIssue.title}`,
+                            body: `${(rootIssue.body || "").trim()}\n\n## Runner Diagnostics\n\n${diag}`.trim(),
+                            execution_spec: null,
+                        };
+                        const cfg = await this.#resolveConfig(repairIssue);
+                        const { exitCode, elapsedS } = await this.#executeBackend(repairIssue, cfg, rootId, {
+                            logSuffix: "unstick",
+                        });
+                        await this.#forum.post(`issue:${rootId}`, JSON.stringify({
+                            step,
+                            issue_id: rootId,
+                            title: rootIssue.title ?? "",
+                            exit_code: exitCode,
+                            elapsed_s: roundTo(elapsedS, 1),
+                            type: "unstick",
+                        }), "orchestrator");
+                        await this.#events.emit("dag.unstick.end", {
+                            source: "dag_runner",
+                            issueId: rootId,
+                            payload: { root_id: rootId, step, exit_code: exitCode, elapsed_s: roundTo(elapsedS, 3) },
+                        });
+                        continue;
+                    }
+                    const issue = candidates[0];
+                    const issueId = issue.id;
+                    await this.#events.emit("dag.step.start", {
+                        source: "dag_runner",
+                        issueId,
+                        payload: { root_id: rootId, step, title: issue.title ?? "" },
+                    });
+                    // 3. Claim.
+                    await this.#events.emit("dag.claim", {
+                        source: "dag_runner",
+                        issueId,
+                        payload: { root_id: rootId, step },
+                    });
+                    await this.#store.claim(issueId);
+                    // 4. Route + 5. Render + 6. Execute.
+                    const cfg = await this.#resolveConfig(issue);
+                    const { exitCode, elapsedS } = await this.#executeBackend(issue, cfg, rootId);
+                    // 7. Check postconditions.
+                    let updated = await this.#store.get(issueId);
+                    if (!updated) {
+                        final = { status: "error", steps: step, error: "issue vanished" };
+                        return final;
+                    }
+                    if (updated.status !== "closed") {
+                        updated = await this.#store.close(issueId, "failure");
+                    }
+                    // 7b. Review phase.
+                    if (review && updated.status === "closed") {
+                        updated = await this.#maybeReview(updated, rootId, step);
+                    }
+                    // 8. Log to forum.
+                    await this.#forum.post(`issue:${issueId}`, JSON.stringify({
+                        step,
+                        issue_id: issueId,
+                        title: issue.title,
+                        exit_code: exitCode,
+                        outcome: updated.outcome,
+                        elapsed_s: roundTo(elapsedS, 1),
+                    }), "orchestrator");
+                    await this.#events.emit("dag.step.end", {
+                        source: "dag_runner",
+                        issueId,
+                        payload: {
+                            root_id: rootId,
+                            step,
+                            exit_code: exitCode,
+                            elapsed_s: roundTo(elapsedS, 3),
+                            outcome: updated.outcome,
+                        },
+                    });
+                    // 9. Re-orchestrate on failure / needs_work.
+                    if (updated.outcome && this.#reorchestrateOutcomes.has(updated.outcome)) {
+                        await this.#reopenForOrchestration(issueId, { reason: `outcome=${updated.outcome}`, step });
+                    }
+                }
+                final = { status: "max_steps_exhausted", steps: maxSteps, error: "" };
+                return final;
+            }
+            catch (err) {
+                final = { status: "error", steps: 0, error: err instanceof Error ? err.message : String(err) };
+                return final;
+            }
+            finally {
+                if (final) {
+                    await this.#events.emit("dag.run.end", {
+                        source: "dag_runner",
+                        issueId: rootId,
+                        payload: { root_id: rootId, status: final.status, steps: final.steps, error: final.error },
+                    });
+                }
+            }
+        });
+    }
+}
package/dist/index.d.ts
ADDED
@@ -0,0 +1,8 @@
+export type { DagResult } from "./dag_runner.js";
+export { DagRunner } from "./dag_runner.js";
+export type { BackendRunner, BackendRunOpts } from "./pi_backend.js";
+export { PiCliBackend, piStreamHasError } from "./pi_backend.js";
+export type { PromptMeta } from "./prompt.js";
+export { buildRoleCatalog, extractDescription, readPromptMeta, renderPromptTemplate, splitFrontmatter, } from "./prompt.js";
+export declare function orchestratorHello(): string;
+//# sourceMappingURL=index.d.ts.map
package/dist/index.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,YAAY,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AAC5C,YAAY,EAAE,aAAa,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAC;AACrE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,iBAAiB,CAAC;AACjE,YAAY,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAC9C,OAAO,EACN,gBAAgB,EAChB,kBAAkB,EAClB,cAAc,EACd,oBAAoB,EACpB,gBAAgB,GAChB,MAAM,aAAa,CAAC;AAGrB,wBAAgB,iBAAiB,IAAI,MAAM,CAE1C"}
package/dist/index.js
ADDED
@@ -0,0 +1,7 @@
+export { DagRunner } from "./dag_runner.js";
+export { PiCliBackend, piStreamHasError } from "./pi_backend.js";
+export { buildRoleCatalog, extractDescription, readPromptMeta, renderPromptTemplate, splitFrontmatter, } from "./prompt.js";
+// Back-compat placeholder API used by other packages/tests.
+export function orchestratorHello() {
+    return "orchestrator(forum,issue)";
+}
package/dist/pi_backend.d.ts
ADDED
@@ -0,0 +1,21 @@
+export type BackendRunOpts = {
+    issueId: string;
+    prompt: string;
+    model: string;
+    thinking: string;
+    cwd: string;
+    cli: string;
+    promptPath: string | null;
+    logSuffix: string;
+    onLine?: (line: string) => void;
+    teePath?: string;
+};
+export interface BackendRunner {
+    run(opts: BackendRunOpts): Promise<number>;
+}
+export declare function piStreamHasError(line: string): boolean;
+export declare class PiCliBackend implements BackendRunner {
+    #private;
+    run(opts: BackendRunOpts): Promise<number>;
+}
+//# sourceMappingURL=pi_backend.d.ts.map
package/dist/pi_backend.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"pi_backend.d.ts","sourceRoot":"","sources":["../src/pi_backend.ts"],"names":[],"mappings":"AAMA,MAAM,MAAM,cAAc,GAAG;IAC5B,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,MAAM,CAAC;IACjB,GAAG,EAAE,MAAM,CAAC;IACZ,GAAG,EAAE,MAAM,CAAC;IACZ,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;IAC1B,SAAS,EAAE,MAAM,CAAC;IAClB,MAAM,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;IAChC,OAAO,CAAC,EAAE,MAAM,CAAC;CACjB,CAAC;AAEF,MAAM,WAAW,aAAa;IAC7B,GAAG,CAAC,IAAI,EAAE,cAAc,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;CAC3C;AAED,wBAAgB,gBAAgB,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CA4BtD;AAED,qBAAa,YAAa,YAAW,aAAa;;IAKpC,GAAG,CAAC,IAAI,EAAE,cAAc,GAAG,OAAO,CAAC,MAAM,CAAC;CA0DvD"}
package/dist/pi_backend.js
ADDED
@@ -0,0 +1,87 @@
+import { spawn } from "node:child_process";
+import { mkdir, open } from "node:fs/promises";
+import { dirname } from "node:path";
+import { createInterface } from "node:readline";
+import { PassThrough } from "node:stream";
+export function piStreamHasError(line) {
+    let event;
+    try {
+        event = JSON.parse(line);
+    }
+    catch {
+        return false;
+    }
+    const etype = event?.type;
+    if (etype === "message_update") {
+        const assistantEvent = event?.assistantMessageEvent;
+        if (assistantEvent && typeof assistantEvent === "object" && assistantEvent.type === "error") {
+            return true;
+        }
+    }
+    if (etype === "message_end") {
+        const message = event?.message;
+        if (!message || typeof message !== "object") {
+            return false;
+        }
+        if (message.role !== "assistant") {
+            return false;
+        }
+        return message.stopReason === "error" || message.stopReason === "aborted";
+    }
+    return false;
+}
+export class PiCliBackend {
+    #buildArgv(prompt, model, thinking) {
+        return ["pi", "--mode", "json", "--no-session", "--model", model, "--thinking", thinking, prompt];
+    }
+    async run(opts) {
+        if (opts.cli !== "pi") {
+            throw new Error(`unsupported backend cli=${JSON.stringify(opts.cli)} (only "pi" is supported)`);
+        }
+        const argv = this.#buildArgv(opts.prompt, opts.model, opts.thinking);
+        let teeFh = null;
+        try {
+            if (opts.teePath) {
+                await mkdir(dirname(opts.teePath), { recursive: true });
+                teeFh = await open(opts.teePath, "w");
+            }
+            const proc = spawn(argv[0], argv.slice(1), {
+                cwd: opts.cwd,
+                stdio: ["ignore", "pipe", "pipe"],
+            });
+            const merged = new PassThrough();
+            proc.stdout?.pipe(merged);
+            proc.stderr?.pipe(merged);
+            let sawAssistantError = false;
+            const rl = createInterface({ input: merged, crlfDelay: Number.POSITIVE_INFINITY });
+            const readLoop = (async () => {
+                for await (const line of rl) {
+                    const trimmed = String(line);
+                    if (piStreamHasError(trimmed)) {
+                        sawAssistantError = true;
+                    }
+                    opts.onLine?.(trimmed);
+                    if (teeFh) {
+                        // Preserve Python's JSONL tee behavior: write one line per line.
+                        await teeFh.write(`${trimmed}\n`);
+                    }
+                }
+            })();
+            const exitCode = await new Promise((resolve, reject) => {
+                proc.once("error", (err) => reject(err));
+                proc.once("close", (code) => resolve(code ?? 0));
+            });
+            // Ensure the reader finishes (and flushes tee writes).
+            await readLoop;
+            if (exitCode === 0 && sawAssistantError) {
+                return 1;
+            }
+            return exitCode;
+        }
+        finally {
+            if (teeFh) {
+                await teeFh.close();
+            }
+        }
+    }
+}
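Editor's note: a small illustration of what `piStreamHasError` flags, using made-up JSONL lines whose fields simply mirror the checks in the function above (`message_end` with an assistant `stopReason` of "error" or "aborted"); the actual `pi` event schema is not documented in this package.

```ts
import { piStreamHasError } from "@femtomc/mu-orchestrator";

// Illustrative stream lines, not captured `pi` output.
const ok = `{"type":"message_end","message":{"role":"assistant","stopReason":"end_turn"}}`;
const aborted = `{"type":"message_end","message":{"role":"assistant","stopReason":"aborted"}}`;

console.log(piStreamHasError(ok));          // false
console.log(piStreamHasError(aborted));     // true (also true for stopReason "error")
console.log(piStreamHasError("not json"));  // false: unparseable lines are ignored
```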
package/dist/prompt.d.ts
ADDED
@@ -0,0 +1,17 @@
+import type { Issue } from "@femtomc/mu-core";
+export type PromptMeta = Record<string, unknown>;
+export declare function splitFrontmatter(text: string): {
+    meta: PromptMeta;
+    body: string;
+};
+export declare function extractDescription(meta: PromptMeta, body: string): {
+    description: string;
+    source: string;
+};
+export declare function readPromptMeta(path: string): Promise<PromptMeta>;
+export declare function buildRoleCatalog(repoRoot: string): Promise<string>;
+export declare function renderPromptTemplate(path: string, issue: Pick<Issue, "id" | "title" | "body">, opts?: {
+    repoRoot?: string;
+}): Promise<string>;
+export declare function resolvePromptPath(repoRoot: string, promptPath: string): string;
+//# sourceMappingURL=prompt.d.ts.map
package/dist/prompt.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"prompt.d.ts","sourceRoot":"","sources":["../src/prompt.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAE9C,MAAM,MAAM,UAAU,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAyCjD,wBAAgB,gBAAgB,CAAC,IAAI,EAAE,MAAM,GAAG;IAAE,IAAI,EAAE,UAAU,CAAC;IAAC,IAAI,EAAE,MAAM,CAAA;CAAE,CA6BjF;AAYD,wBAAgB,kBAAkB,CAAC,IAAI,EAAE,UAAU,EAAE,IAAI,EAAE,MAAM,GAAG;IAAE,WAAW,EAAE,MAAM,CAAC;IAAC,MAAM,EAAE,MAAM,CAAA;CAAE,CAW1G;AAED,wBAAsB,cAAc,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,UAAU,CAAC,CAItE;AAMD,wBAAsB,gBAAgB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAwCxE;AAED,wBAAsB,oBAAoB,CACzC,IAAI,EAAE,MAAM,EACZ,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,IAAI,GAAG,OAAO,GAAG,MAAM,CAAC,EAC3C,IAAI,GAAE;IAAE,QAAQ,CAAC,EAAE,MAAM,CAAA;CAAO,GAC9B,OAAO,CAAC,MAAM,CAAC,CAmBjB;AAED,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,GAAG,MAAM,CAK9E"}
package/dist/prompt.js
ADDED
@@ -0,0 +1,153 @@
+import { readdir, readFile } from "node:fs/promises";
+import { isAbsolute, join, relative } from "node:path";
+function stripQuotes(s) {
+    const trimmed = s.trim();
+    if (trimmed.length >= 2) {
+        const first = trimmed[0];
+        const last = trimmed[trimmed.length - 1];
+        if ((first === `"` && last === `"`) || (first === `'` && last === `'`)) {
+            return trimmed.slice(1, -1);
+        }
+    }
+    return trimmed;
+}
+function parseSimpleYamlFrontmatter(text) {
+    // We only need a small subset: flat `key: value` mappings.
+    // If parsing fails, we return {} (mirrors Python behavior).
+    const out = {};
+    const lines = text.split(/\r?\n/);
+    for (const rawLine of lines) {
+        const line = rawLine.trim();
+        if (line.length === 0) {
+            continue;
+        }
+        if (line.startsWith("#")) {
+            continue;
+        }
+        const idx = line.indexOf(":");
+        if (idx <= 0) {
+            continue;
+        }
+        const key = line.slice(0, idx).trim();
+        if (!key) {
+            continue;
+        }
+        const value = stripQuotes(line.slice(idx + 1));
+        out[key] = value;
+    }
+    return out;
+}
+export function splitFrontmatter(text) {
+    const lines = text.split(/\r?\n/);
+    if (lines.length === 0 || lines[0]?.trim() !== "---") {
+        return { meta: {}, body: text };
+    }
+    // Find the terminating `---` line.
+    let endIdx = -1;
+    for (let i = 1; i < lines.length; i++) {
+        if (lines[i]?.trim() === "---") {
+            endIdx = i;
+            break;
+        }
+    }
+    if (endIdx < 0) {
+        return { meta: {}, body: text };
+    }
+    try {
+        const metaText = lines.slice(1, endIdx).join("\n");
+        const body = lines
+            .slice(endIdx + 1)
+            .join("\n")
+            .replace(/^\n+/, "");
+        const meta = parseSimpleYamlFrontmatter(metaText);
+        return { meta, body };
+    }
+    catch {
+        return { meta: {}, body: text };
+    }
+}
+function firstNonEmptyLine(text) {
+    for (const line of text.split(/\r?\n/)) {
+        const stripped = line.trim();
+        if (stripped) {
+            return stripped;
+        }
+    }
+    return "";
+}
+export function extractDescription(meta, body) {
+    const raw = meta.description;
+    const desc = typeof raw === "string" ? raw.trim() : "";
+    if (desc) {
+        return { description: desc, source: "frontmatter" };
+    }
+    const bodyDesc = firstNonEmptyLine(body);
+    if (bodyDesc) {
+        return { description: bodyDesc, source: "body" };
+    }
+    return { description: "", source: "none" };
+}
+export async function readPromptMeta(path) {
+    const text = await readFile(path, "utf8");
+    const { meta } = splitFrontmatter(text);
+    return meta;
+}
+function toPosixPath(path) {
+    return path.replaceAll("\\", "/");
+}
+export async function buildRoleCatalog(repoRoot) {
+    const rolesDir = join(repoRoot, ".mu", "roles");
+    let entries;
+    try {
+        entries = await readdir(rolesDir);
+    }
+    catch {
+        return "";
+    }
+    const roleFiles = entries.filter((e) => e.endsWith(".md")).sort();
+    const sections = [];
+    for (const file of roleFiles) {
+        const abs = join(rolesDir, file);
+        const text = await readFile(abs, "utf8");
+        const { meta, body } = splitFrontmatter(text);
+        const name = file.replace(/\.md$/, "");
+        const promptPath = toPosixPath(relative(repoRoot, abs));
+        const { description, source } = extractDescription(meta, body);
+        const parts = [];
+        for (const key of ["cli", "model", "reasoning"]) {
+            if (key in meta) {
+                parts.push(`${key}: ${String(meta[key])}`);
+            }
+        }
+        const configLine = parts.length > 0 ? parts.join(" | ") : "default config";
+        const catalogDesc = description || "No description provided.";
+        sections.push(`### ${name}\n` +
+            `description: ${catalogDesc}\n` +
+            `description_source: ${source}\n` +
+            `prompt: ${promptPath}\n` +
+            `config: ${configLine}`);
+    }
+    return sections.join("\n\n");
+}
+export async function renderPromptTemplate(path, issue, opts = {}) {
+    const text = await readFile(path, "utf8");
+    const { body } = splitFrontmatter(text);
+    let promptText = issue.title ?? "";
+    if (issue.body) {
+        promptText += `\n\n${issue.body}`;
+    }
+    let rendered = body;
+    rendered = rendered.replaceAll("{{PROMPT}}", promptText);
+    rendered = rendered.replaceAll("{{ISSUE_ID}}", issue.id ?? "");
+    if (rendered.includes("{{ROLES}}")) {
+        const catalog = opts.repoRoot ? await buildRoleCatalog(opts.repoRoot) : "";
+        rendered = rendered.replaceAll("{{ROLES}}", catalog);
+    }
+    return rendered;
+}
+export function resolvePromptPath(repoRoot, promptPath) {
+    if (isAbsolute(promptPath)) {
+        return promptPath;
+    }
+    return join(repoRoot, promptPath);
+}
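Editor's note: a quick sketch of the frontmatter handling above, using an in-memory role file; the keys and values are illustrative, and only `description`, `cli`, `model`, and `reasoning` are treated specially by the catalog builder.

```ts
import { splitFrontmatter, extractDescription } from "@femtomc/mu-orchestrator";

// A role file in the shape the parser expects: flat `key: value` frontmatter
// between `---` fences, followed by a markdown body. Values are made up.
const roleFile = [
    "---",
    "description: Reviews completed work",
    "model: some-model",
    "reasoning: high",
    "---",
    "You are the reviewer role.",
].join("\n");

const { meta, body } = splitFrontmatter(roleFile);
console.log(meta.model);                      // "some-model"
console.log(extractDescription(meta, body));  // { description: "Reviews completed work", source: "frontmatter" }
```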
package/package.json
ADDED
@@ -0,0 +1,19 @@
+{
+  "name": "@femtomc/mu-orchestrator",
+  "version": "0.1.0",
+  "type": "module",
+  "main": "./dist/index.js",
+  "types": "./dist/index.d.ts",
+  "exports": {
+    ".": {
+      "types": "./dist/index.d.ts",
+      "default": "./dist/index.js"
+    }
+  },
+  "files": ["dist/**"],
+  "dependencies": {
+    "@femtomc/mu-core": "0.1.0",
+    "@femtomc/mu-forum": "0.1.0",
+    "@femtomc/mu-issue": "0.1.0"
+  }
+}