@runfusion/fusion 0.20.0 → 0.22.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. package/dist/bin.js +2021 -996
  2. package/dist/client/assets/AgentDetailView-BKKpbp1S.js +18 -0
  3. package/dist/client/assets/AgentDetailView-CeO_1MK7.css +1 -0
  4. package/dist/client/assets/AgentsView-BRXFmrcJ.js +527 -0
  5. package/dist/client/assets/AgentsView-Bs03ptrd.css +1 -0
  6. package/dist/client/assets/ChatView-D7L2e_qu.js +1 -0
  7. package/dist/client/assets/DevServerView-l8RCyL2k.js +1 -0
  8. package/dist/client/assets/DirectoryPicker-CS1dwqcC.js +1 -0
  9. package/dist/client/assets/DocumentsView-DmthQWDZ.js +1 -0
  10. package/dist/client/assets/{InsightsView-Cqim12az.js → InsightsView-DvXpMKmH.js} +2 -2
  11. package/dist/client/assets/{MemoryView-CakLoJtY.js → MemoryView-CPwlKnUI.js} +2 -2
  12. package/dist/client/assets/{NodesView-BxGm3poT.js → NodesView-BLlfUfsy.js} +3 -3
  13. package/dist/client/assets/{PiExtensionsManager-lJbmskyZ.js → PiExtensionsManager-j8rPXqmB.js} +2 -2
  14. package/dist/client/assets/PluginManager-pW6RMz5z.js +1 -0
  15. package/dist/client/assets/ResearchView-D9DNJYDq.js +1 -0
  16. package/dist/client/assets/{RoadmapsView-CeKks_OI.js → RoadmapsView-Djc_X35v.js} +2 -2
  17. package/dist/client/assets/SettingsModal-WGCF_pk8.js +31 -0
  18. package/dist/client/assets/{SettingsModal-YdeVPhRJ.js → SettingsModal-fxvTFLtR.js} +1 -1
  19. package/dist/client/assets/SetupWizardModal-tG_MF_nA.js +1 -0
  20. package/dist/client/assets/SkillsView-Ddf0YL8z.js +1 -0
  21. package/dist/client/assets/agentSkills-DDHJnrkn.css +1 -0
  22. package/dist/client/assets/agentSkills-EwIwBlG8.js +1 -0
  23. package/dist/client/assets/folder-open-BiJpmnaT.js +6 -0
  24. package/dist/client/assets/index-D6ebxTPF.css +1 -0
  25. package/dist/client/assets/index-DYDLmOcK.js +694 -0
  26. package/dist/client/assets/{star-DxVRh9VT.js → star-BwRZmiuZ.js} +2 -2
  27. package/dist/client/assets/upload-D4NwZhPp.js +6 -0
  28. package/dist/client/assets/{users-3SD3oNMQ.js → users-DNISDtI1.js} +2 -2
  29. package/dist/client/index.html +2 -2
  30. package/dist/client/version.json +1 -1
  31. package/dist/droid-cli/package.json +1 -1
  32. package/dist/extension.js +1172 -401
  33. package/dist/pi-claude-cli/package.json +1 -1
  34. package/dist/plugins/fusion-plugin-dependency-graph/package.json +1 -1
  35. package/dist/plugins/fusion-plugin-hermes-runtime/bundled.js +480 -0
  36. package/dist/plugins/fusion-plugin-hermes-runtime/manifest.json +14 -0
  37. package/dist/plugins/fusion-plugin-hermes-runtime/package.json +11 -0
  38. package/dist/plugins/fusion-plugin-openclaw-runtime/bundled.js +369 -0
  39. package/dist/plugins/fusion-plugin-openclaw-runtime/manifest.json +14 -0
  40. package/dist/plugins/fusion-plugin-openclaw-runtime/package.json +11 -0
  41. package/dist/plugins/fusion-plugin-paperclip-runtime/bundled.js +966 -0
  42. package/dist/plugins/fusion-plugin-paperclip-runtime/manifest.json +15 -0
  43. package/dist/plugins/fusion-plugin-paperclip-runtime/package.json +11 -0
  44. package/package.json +3 -1
  45. package/skill/fusion/references/engine-tools.md +1 -1
  46. package/dist/client/assets/AgentDetailView-C6BG7O7i.js +0 -18
  47. package/dist/client/assets/AgentDetailView-CUtWvXBn.css +0 -1
  48. package/dist/client/assets/ChatView-DeXUYwSY.js +0 -1
  49. package/dist/client/assets/DevServerView-Dariyxt_.js +0 -1
  50. package/dist/client/assets/DirectoryPicker-SchiK-Aq.js +0 -1
  51. package/dist/client/assets/DocumentsView-C6v-tBhG.js +0 -1
  52. package/dist/client/assets/PluginManager-BZjNNf9m.js +0 -1
  53. package/dist/client/assets/ResearchView-Bzsr9V0y.js +0 -1
  54. package/dist/client/assets/SettingsModal-D-9CLguN.js +0 -31
  55. package/dist/client/assets/SetupWizardModal-DAC04LlA.js +0 -1
  56. package/dist/client/assets/SkillsView-CClC_5RN.js +0 -1
  57. package/dist/client/assets/index-CrHLf3pB.js +0 -1222
  58. package/dist/client/assets/index-Df1bHDY4.css +0 -1
package/dist/extension.js CHANGED
@@ -1183,6 +1183,7 @@ function agentToConfigSnapshot(agent) {
1183
1183
  role: agent.role,
1184
1184
  title: agent.title,
1185
1185
  icon: agent.icon,
1186
+ imageUrl: agent.imageUrl,
1186
1187
  reportsTo: agent.reportsTo,
1187
1188
  runtimeConfig: agent.runtimeConfig ? { ...agent.runtimeConfig } : void 0,
1188
1189
  permissions: agent.permissions ? { ...agent.permissions } : void 0,
@@ -1204,6 +1205,7 @@ function diffConfigSnapshots(before, after) {
1204
1205
  "role",
1205
1206
  "title",
1206
1207
  "icon",
1208
+ "imageUrl",
1207
1209
  "reportsTo",
1208
1210
  "runtimeConfig",
1209
1211
  "permissions",
@@ -1574,12 +1576,10 @@ Output Requirements:
1574
1576
  };
1575
1577
  AGENT_VALID_TRANSITIONS = {
1576
1578
  idle: ["active"],
1577
- active: ["running", "paused", "terminated"],
1578
- running: ["active", "paused", "error", "terminated"],
1579
- paused: ["active", "terminated"],
1580
- error: ["active", "terminated"],
1581
- terminated: ["idle", "active", "running"]
1582
- // Can be restarted or reset
1579
+ active: ["idle", "running", "paused", "error"],
1580
+ running: ["idle", "active", "paused", "error"],
1581
+ paused: ["idle", "active"],
1582
+ error: ["idle", "active"]
1583
1583
  };
1584
1584
  AGENT_PERMISSIONS = [
1585
1585
  "tasks:assign",
@@ -3813,6 +3813,7 @@ This means a caller passed a .fusion directory where a project root was expected
3813
3813
  );
3814
3814
  this.migrate();
3815
3815
  this.ensureRoutinesSchemaCompatibility();
3816
+ this.ensureInsightRunsSchemaCompatibility();
3816
3817
  const configNow = (/* @__PURE__ */ new Date()).toISOString();
3817
3818
  this.db.exec(
3818
3819
  `INSERT OR IGNORE INTO config (id, nextId, nextWorkflowStepId, settings, workflowSteps, updatedAt) VALUES (1, 1, 1, '${JSON.stringify(DEFAULT_PROJECT_SETTINGS)}', '[]', '${configNow}')`
@@ -3860,6 +3861,23 @@ This means a caller passed a .fusion directory where a project root was expected
3860
3861
  this.db.exec("CREATE INDEX IF NOT EXISTS idxRoutinesEnabled ON routines(enabled)");
3861
3862
  this.db.exec("CREATE INDEX IF NOT EXISTS idxRoutinesScope ON routines(scope)");
3862
3863
  }
3864
+ /**
3865
+ * Applies idempotent compatibility fixes for the project_insight_runs table.
3866
+ *
3867
+ * The `lifecycle` and `cancelledAt` columns were added to SCHEMA_SQL and
3868
+ * retroactively inserted into migration v33's CREATE TABLE, with a safety-net
3869
+ * in migration v59. However, databases that were already at v59+ when the
3870
+ * commit landed never re-run v59, leaving the columns missing. Running this
3871
+ * unconditionally on every init guarantees the columns exist.
3872
+ */
3873
+ ensureInsightRunsSchemaCompatibility() {
3874
+ if (!this.hasTable("project_insight_runs")) {
3875
+ return;
3876
+ }
3877
+ this.addColumnIfMissing("project_insight_runs", "lifecycle", "TEXT");
3878
+ this.addColumnIfMissing("project_insight_runs", "cancelledAt", "TEXT");
3879
+ this.db.exec(`CREATE INDEX IF NOT EXISTS idxInsightRunsProjectTriggerStatus ON project_insight_runs(projectId, trigger, status)`);
3880
+ }
3863
3881
  migrate() {
3864
3882
  const version = this.getSchemaVersion() || 1;
3865
3883
  if (version >= SCHEMA_VERSION) return;
@@ -5537,6 +5555,7 @@ var init_agent_store = __esm({
5537
5555
  metadata,
5538
5556
  ...input.title && { title: input.title },
5539
5557
  ...input.icon && { icon: input.icon },
5558
+ ...input.imageUrl && { imageUrl: input.imageUrl },
5540
5559
  ...input.reportsTo && { reportsTo: input.reportsTo },
5541
5560
  ...runtimeConfig && { runtimeConfig },
5542
5561
  ...input.permissions && { permissions: input.permissions },
@@ -5920,6 +5939,7 @@ var init_agent_store = __esm({
5920
5939
  updatedAt,
5921
5940
  ..."title" in updates && { title: updates.title },
5922
5941
  ..."icon" in updates && { icon: updates.icon },
5942
+ ..."imageUrl" in updates && { imageUrl: updates.imageUrl },
5923
5943
  ..."reportsTo" in updates && { reportsTo: updates.reportsTo },
5924
5944
  ..."runtimeConfig" in updates && { runtimeConfig: updates.runtimeConfig },
5925
5945
  ..."pauseReason" in updates && { pauseReason: updates.pauseReason },
@@ -6045,7 +6065,9 @@ var init_agent_store = __esm({
6045
6065
  state: newState,
6046
6066
  updatedAt: (/* @__PURE__ */ new Date()).toISOString(),
6047
6067
  // Clear lastError when transitioning away from terminated
6048
- ...currentState === "terminated" && newState !== "terminated" && { lastError: void 0 }
6068
+ // Clear lastError when an agent re-enters an actionable state so
6069
+ // a resumed agent does not carry stale "Error" badges.
6070
+ ...(newState === "active" || newState === "running") && { lastError: void 0 }
6049
6071
  };
6050
6072
  await this.writeAgent(updated);
6051
6073
  this.emit("agent:stateChanged", agentId, currentState, newState);
@@ -6260,9 +6282,6 @@ var init_agent_store = __esm({
6260
6282
  if (activeRun) {
6261
6283
  await this.endHeartbeatRun(activeRun.id, "terminated");
6262
6284
  }
6263
- if (agent.state !== "idle" && agent.state !== "terminated") {
6264
- agent = await this.updateAgentState(agentId, "terminated");
6265
- }
6266
6285
  if (agent.state !== "idle") {
6267
6286
  agent = await this.updateAgentState(agentId, "idle");
6268
6287
  }
@@ -6849,6 +6868,7 @@ var init_agent_store = __esm({
6849
6868
  role: snapshot.role,
6850
6869
  title: snapshot.title,
6851
6870
  icon: snapshot.icon,
6871
+ imageUrl: snapshot.imageUrl,
6852
6872
  reportsTo: snapshot.reportsTo,
6853
6873
  runtimeConfig: snapshot.runtimeConfig ? { ...snapshot.runtimeConfig } : void 0,
6854
6874
  permissions: snapshot.permissions ? { ...snapshot.permissions } : void 0,
@@ -7072,6 +7092,7 @@ var init_agent_store = __esm({
7072
7092
  metadata: data.metadata ?? {},
7073
7093
  title: data.title,
7074
7094
  icon: data.icon,
7095
+ imageUrl: data.imageUrl,
7075
7096
  reportsTo: data.reportsTo,
7076
7097
  runtimeConfig: data.runtimeConfig,
7077
7098
  pauseReason: data.pauseReason,
@@ -7100,6 +7121,7 @@ var init_agent_store = __esm({
7100
7121
  metadata: agent.metadata,
7101
7122
  title: agent.title,
7102
7123
  icon: agent.icon,
7124
+ imageUrl: agent.imageUrl,
7103
7125
  reportsTo: agent.reportsTo,
7104
7126
  runtimeConfig: agent.runtimeConfig,
7105
7127
  pauseReason: agent.pauseReason,
@@ -32528,20 +32550,17 @@ This project has OpenClaw-style memory files:
32528
32550
 
32529
32551
  **At the end of execution (before calling \`fn_task_done()\`):**
32530
32552
  1. Review what you learned during this task that would genuinely benefit future runs
32531
- 2. Write durable decisions, conventions, and pitfalls to \`.fusion/memory/MEMORY.md\`
32532
- 3. Write running observations, unresolved context, and open loops to today's \`.fusion/memory/YYYY-MM-DD.md\`
32533
- 4. **If nothing durable was learned, skip the memory update entirely** \u2014 do not append trivial or task-specific notes
32534
- 5. Only write when you have genuinely durable, reusable insights such as:
32535
- - New architectural patterns or module boundaries discovered
32536
- - Conventions or standards that should be followed
32537
- - Pitfalls or anti-patterns to avoid in future work
32538
- - Important constraints or context that affects implementation decisions
32539
- 6. **Avoid** writing task-specific trivia such as:
32540
- - Per-task implementation logs or changelog entries
32541
- - Transient failures resolved without broader lessons
32542
- - One-off file paths, variable names, or minor code changes
32543
- - Notes about what you did rather than what future agents should know
32544
- 7. **Consolidate when possible**: If an existing entry already covers a concept, update or refine it rather than adding a duplicate. Delete entries that are no longer accurate.
32553
+ 2. Choose scope intentionally:
32554
+ - Use \`fn_memory_append(scope="agent")\` for your private operating context (personal checklists, delegation habits, temporary playbooks, self-improvement notes)
32555
+ - Use \`fn_memory_append(scope="project")\` for repository-wide durable knowledge any future agent should know
32556
+ 3. Choose layer intentionally:
32557
+ - \`layer="long-term"\` for durable conventions/decisions/pitfalls
32558
+ - \`layer="daily"\` for running observations, unresolved context, and open loops
32559
+ 4. If using project scope with file backend, write long-term memory to \`.fusion/memory/MEMORY.md\` and daily notes to today's \`.fusion/memory/YYYY-MM-DD.md\`
32560
+ 5. **If nothing durable was learned, skip the memory update entirely** \u2014 do not append trivial or task-specific notes
32561
+ 6. Only write to **project** memory when the insight is genuinely reusable across the workspace (architecture patterns, shared conventions, durable pitfalls, cross-task constraints)
32562
+ 7. **Do not** write private/ephemeral items to project memory, such as personal TODOs, one-off scratch notes, or preferences that only help you as an individual agent
32563
+ 8. **Consolidate when possible**: If an existing entry already covers a concept, update or refine it rather than adding a duplicate. Delete entries that are no longer accurate.
32545
32564
 
32546
32565
  **Format for additions:** Add bullet points under the relevant section heading:
32547
32566
  - Use \`- \` prefix for list items
@@ -32561,18 +32580,15 @@ This project has a memory system that stores durable project learnings accumulat
32561
32580
 
32562
32581
  **At the end of execution (before calling \`fn_task_done()\`):**
32563
32582
  1. Review what you learned during this task that would genuinely benefit future runs
32564
- 2. **If nothing durable was learned, skip the memory update entirely** \u2014 do not append trivial or task-specific notes
32565
- 3. Only write when you have genuinely durable, reusable insights such as:
32566
- - New architectural patterns or module boundaries discovered
32567
- - Conventions or standards that should be followed
32568
- - Pitfalls or anti-patterns to avoid in future work
32569
- - Important constraints or context that affects implementation decisions
32570
- 4. **Avoid** writing task-specific trivia such as:
32571
- - Per-task implementation logs or changelog entries
32572
- - Transient failures resolved without broader lessons
32573
- - One-off file paths, variable names, or minor code changes
32574
- - Notes about what you did rather than what future agents should know
32575
- 5. Consolidate when possible: refine an existing memory entry instead of adding duplicates.
32583
+ 2. Choose scope intentionally:
32584
+ - Use \`fn_memory_append(scope="agent")\` for your private operating context
32585
+ - Use \`fn_memory_append(scope="project")\` only for repo-wide durable knowledge
32586
+ 3. Choose layer intentionally:
32587
+ - \`layer="long-term"\` for durable conventions/decisions/pitfalls
32588
+ - \`layer="daily"\` for running observations and open loops
32589
+ 4. **If nothing durable was learned, skip the memory update entirely** \u2014 do not append trivial or task-specific notes
32590
+ 5. **Avoid task-specific trivia** in project scope (for example: personal reminders, one-off scratch thoughts, individual communication preferences)
32591
+ 6. Consolidate when possible: refine an existing memory entry instead of adding duplicates.
32576
32592
  `;
32577
32593
  }
32578
32594
  function buildReviewerMemoryInstructions(rootDir, settings) {
@@ -56322,8 +56338,12 @@ function createFusionAuthStorage() {
56322
56338
  const primary = AuthStorage.create(getFusionAuthPath());
56323
56339
  let supplementalCredentials = readSupplementalCredentials();
56324
56340
  let modelsJsonApiKeys = readModelsJsonApiKeys();
56341
+ const loggedOutProviders = /* @__PURE__ */ new Set();
56325
56342
  const syncSupplementalOauthCredentials = () => {
56326
56343
  for (const [provider, credential] of Object.entries(supplementalCredentials)) {
56344
+ if (loggedOutProviders.has(provider)) {
56345
+ continue;
56346
+ }
56327
56347
  const current = primary.get(provider);
56328
56348
  if (!shouldHydrateStoredCredential(current, credential)) {
56329
56349
  continue;
@@ -56344,6 +56364,24 @@ function createFusionAuthStorage() {
56344
56364
  return true;
56345
56365
  },
56346
56366
  get(target, prop, receiver) {
56367
+ if (prop === "logout") {
56368
+ return (provider) => {
56369
+ target.logout(provider);
56370
+ loggedOutProviders.add(provider);
56371
+ };
56372
+ }
56373
+ if (prop === "remove") {
56374
+ return (provider) => {
56375
+ target.remove(provider);
56376
+ loggedOutProviders.add(provider);
56377
+ };
56378
+ }
56379
+ if (prop === "set") {
56380
+ return (provider, credential) => {
56381
+ target.set(provider, credential);
56382
+ loggedOutProviders.delete(provider);
56383
+ };
56384
+ }
56347
56385
  if (prop === "reload") {
56348
56386
  return () => {
56349
56387
  target.reload();
@@ -56353,25 +56391,43 @@ function createFusionAuthStorage() {
56353
56391
  };
56354
56392
  }
56355
56393
  if (prop === "get") {
56356
- return (provider) => choosePreferredStoredCredential(
56357
- target.get(provider),
56358
- supplementalCredentials[provider]
56359
- );
56394
+ return (provider) => {
56395
+ if (loggedOutProviders.has(provider)) {
56396
+ return void 0;
56397
+ }
56398
+ return choosePreferredStoredCredential(
56399
+ target.get(provider),
56400
+ supplementalCredentials[provider]
56401
+ );
56402
+ };
56360
56403
  }
56361
56404
  if (prop === "has") {
56362
- return (provider) => target.has(provider) || provider in supplementalCredentials || modelsJsonApiKeys.has(provider);
56405
+ return (provider) => {
56406
+ if (loggedOutProviders.has(provider)) {
56407
+ return false;
56408
+ }
56409
+ return target.has(provider) || provider in supplementalCredentials || modelsJsonApiKeys.has(provider);
56410
+ };
56363
56411
  }
56364
56412
  if (prop === "hasAuth") {
56365
- return (provider) => target.hasAuth(provider) || Boolean(supplementalCredentials[provider]) || modelsJsonApiKeys.has(provider);
56413
+ return (provider) => {
56414
+ if (loggedOutProviders.has(provider)) {
56415
+ return false;
56416
+ }
56417
+ return target.hasAuth(provider) || Boolean(supplementalCredentials[provider]) || modelsJsonApiKeys.has(provider);
56418
+ };
56366
56419
  }
56367
56420
  if (prop === "getAll") {
56368
56421
  return () => {
56369
56422
  const providerIds = /* @__PURE__ */ new Set([
56370
- ...Object.keys(supplementalCredentials),
56371
- ...Object.keys(target.getAll())
56423
+ ...Object.keys(target.getAll()),
56424
+ ...loggedOutProviders.size > 0 ? Object.keys(supplementalCredentials).filter((p) => !loggedOutProviders.has(p)) : Object.keys(supplementalCredentials)
56372
56425
  ]);
56373
56426
  const merged = {};
56374
56427
  for (const providerId of providerIds) {
56428
+ if (loggedOutProviders.has(providerId)) {
56429
+ continue;
56430
+ }
56375
56431
  const credential = choosePreferredStoredCredential(
56376
56432
  target.get(providerId),
56377
56433
  supplementalCredentials[providerId]
@@ -56384,10 +56440,26 @@ function createFusionAuthStorage() {
56384
56440
  };
56385
56441
  }
56386
56442
  if (prop === "list") {
56387
- return () => Array.from(/* @__PURE__ */ new Set([...Object.keys(supplementalCredentials), ...target.list(), ...modelsJsonApiKeys.keys()]));
56443
+ return () => {
56444
+ const providers = /* @__PURE__ */ new Set([...target.list()]);
56445
+ for (const p of modelsJsonApiKeys.keys()) {
56446
+ if (!loggedOutProviders.has(p)) {
56447
+ providers.add(p);
56448
+ }
56449
+ }
56450
+ for (const p of Object.keys(supplementalCredentials)) {
56451
+ if (!loggedOutProviders.has(p)) {
56452
+ providers.add(p);
56453
+ }
56454
+ }
56455
+ return Array.from(providers).filter((p) => !loggedOutProviders.has(p));
56456
+ };
56388
56457
  }
56389
56458
  if (prop === "getApiKey") {
56390
56459
  return async (provider) => {
56460
+ if (loggedOutProviders.has(provider)) {
56461
+ return void 0;
56462
+ }
56391
56463
  const primaryKey = await target.getApiKey(provider);
56392
56464
  if (primaryKey) return primaryKey;
56393
56465
  const supplementalKey = resolveStoredCredentialApiKey(provider, supplementalCredentials[provider]);
@@ -56641,7 +56713,8 @@ async function flushMemoryBeforeSessionCompaction(session) {
56641
56713
  }
56642
56714
  const flushPrompt = [
56643
56715
  "Before context compaction, preserve only unresolved durable memory if needed.",
56644
- "If fn_memory_append is available and you learned reusable project decisions, conventions, pitfalls, or open loops that are not already saved, append them now.",
56716
+ "If fn_memory_append is available and you learned reusable project decisions/conventions/pitfalls/open loops or private operating context that is not already saved, append it now.",
56717
+ 'Use scope="project" for shared workspace knowledge and scope="agent" for private operating context.',
56645
56718
  'Use layer="long-term" for durable facts and layer="daily" for running notes/open loops.',
56646
56719
  "If there is nothing durable to save, reply exactly: NONE."
56647
56720
  ].join("\n");
@@ -57135,7 +57208,10 @@ async function createFnAgent2(options) {
57135
57208
  if (selectionResult.diagnostics.length > 0) {
57136
57209
  const purpose = effectiveSkillSelection.sessionPurpose ?? "skills";
57137
57210
  for (const diag of selectionResult.diagnostics) {
57138
- piLog.warn(`[skills] [${purpose}] ${diag.type}: ${diag.message}`);
57211
+ const msg = `[skills] [${purpose}] ${diag.type}: ${diag.message}`;
57212
+ if (diag.type === "error") piLog.error(msg);
57213
+ else if (diag.type === "warning") piLog.warn(msg);
57214
+ else piLog.log(msg);
57139
57215
  }
57140
57216
  }
57141
57217
  skillsOverrideFn = createSkillsOverrideFromSelection(selectionResult, {
@@ -58601,6 +58677,33 @@ function buildSystemPromptWithInstructions(basePrompt, instructions) {
58601
58677
 
58602
58678
  ${instructions}`;
58603
58679
  }
58680
+ function buildPluginPromptSection(surface, pluginRunner) {
58681
+ if (!pluginRunner) {
58682
+ return "";
58683
+ }
58684
+ const contributions = pluginRunner.getPromptContributionsForSurface(surface);
58685
+ if (contributions.length === 0) {
58686
+ return "";
58687
+ }
58688
+ const prependByPlugin = /* @__PURE__ */ new Map();
58689
+ const appendByPlugin = /* @__PURE__ */ new Map();
58690
+ for (const { pluginId, contribution } of contributions) {
58691
+ const target = contribution.position === "prepend" ? prependByPlugin : appendByPlugin;
58692
+ const existing = target.get(pluginId) ?? [];
58693
+ existing.push(contribution.content);
58694
+ target.set(pluginId, existing);
58695
+ }
58696
+ const toSections = (group) => {
58697
+ return Array.from(group.entries()).map(([pluginId, contents]) => {
58698
+ return `## Plugin: ${pluginId}
58699
+
58700
+ ${contents.join("\n\n")}`;
58701
+ });
58702
+ };
58703
+ const sections = [...toSections(prependByPlugin), ...toSections(appendByPlugin)];
58704
+ log11.log(`Applied ${contributions.length} prompt contributions for surface '${surface}'`);
58705
+ return sections.join("\n\n");
58706
+ }
58604
58707
  var log11, MAX_INSTRUCTIONS_PATH_LENGTH, MAX_INSTRUCTIONS_TEXT_LENGTH, MAX_SOUL_LENGTH, MAX_MEMORY_LENGTH;
58605
58708
  var init_agent_instructions = __esm({
58606
58709
  "../engine/src/agent-instructions.ts"() {
@@ -59155,7 +59258,7 @@ function createMemoryAppendTool(rootDir, settings, options) {
59155
59258
  return {
59156
59259
  name: "fn_memory_append",
59157
59260
  label: "Append Memory",
59158
- description: "Append concise Markdown to project memory. Use long-term only for durable conventions/decisions/pitfalls; use daily for running observations and open loops. Skip this tool when there is no reusable memory.",
59261
+ description: 'Append concise Markdown to memory. Use scope="agent" for private operating context and scope="project" for workspace-wide durable knowledge. Use layer="long-term" for durable conventions/decisions/pitfalls and layer="daily" for running observations/open loops.',
59159
59262
  parameters: memoryAppendParams,
59160
59263
  execute: async (_id, params) => {
59161
59264
  const content = params.content.trim();
@@ -61314,6 +61417,17 @@ async function reviewStep(cwd, taskId, stepNumber, stepName, reviewType, promptC
61314
61417
  reviewerBasePrompt + memorySection,
61315
61418
  reviewerInstructions
61316
61419
  );
61420
+ const reviewerContributions = options.pluginRunner?.getPromptContributionsForSurface("reviewer") ?? [];
61421
+ if (reviewerContributions.length > 0) {
61422
+ reviewerLog.log(`applied ${reviewerContributions.length} plugin prompt contributions for reviewer surface`);
61423
+ }
61424
+ const reviewerPluginContributions = buildPluginPromptSection(
61425
+ "reviewer",
61426
+ options.pluginRunner
61427
+ );
61428
+ const reviewerSystemPromptFinal = reviewerPluginContributions ? `${reviewerSystemPrompt}
61429
+
61430
+ ${reviewerPluginContributions}` : reviewerSystemPrompt;
61317
61431
  let skillContext = void 0;
61318
61432
  if (options.agentStore && options.rootDir) {
61319
61433
  try {
@@ -61359,7 +61473,7 @@ async function reviewStep(cwd, taskId, stepNumber, stepName, reviewType, promptC
61359
61473
  runtimeHint: extractRuntimeHint(memoryAgent?.runtimeConfig),
61360
61474
  pluginRunner: options.pluginRunner,
61361
61475
  cwd,
61362
- systemPrompt: reviewerSystemPrompt,
61476
+ systemPrompt: reviewerSystemPromptFinal,
61363
61477
  tools: "readonly",
61364
61478
  customTools: memoryTools,
61365
61479
  onText: agentLogger ? agentLogger.onText : (delta) => options.onText?.(delta),
@@ -62619,23 +62733,7 @@ Write the PROMPT.md directly using the write tool, then call \`fn_review_spec()\
62619
62733
  this.options = options;
62620
62734
  store.on("settings:updated", ({ settings, previous }) => {
62621
62735
  if (settings.globalPause && !previous.globalPause) {
62622
- for (const taskId of [...this.activeSubagentSessions.keys()]) {
62623
- this.disposeSubagentsForTask(taskId, "global pause");
62624
- }
62625
- for (const [taskId, session] of this.activeSessions) {
62626
- planLog.log(
62627
- `Global pause \u2014 terminating triage session for ${taskId}`
62628
- );
62629
- this.pauseAborted.add(taskId);
62630
- this.options.stuckTaskDetector?.untrackTask(taskId);
62631
- const sessionWithAbort = session;
62632
- if (typeof sessionWithAbort.abort === "function") {
62633
- void sessionWithAbort.abort().catch((err) => {
62634
- planLog.warn(`Failed to abort triage session for ${taskId}: ${err}`);
62635
- });
62636
- }
62637
- session.dispose();
62638
- }
62736
+ this.abortAndDisposeActiveSessions("global pause");
62639
62737
  }
62640
62738
  });
62641
62739
  store.on("settings:updated", ({ settings, previous }) => {
@@ -62704,8 +62802,35 @@ Write the PROMPT.md directly using the write tool, then call \`fn_review_spec()\
62704
62802
  this.pollInterval = null;
62705
62803
  this.activePollMs = null;
62706
62804
  }
62805
+ this.abortAndDisposeActiveSessions("engine stop");
62707
62806
  planLog.log("Processor stopped");
62708
62807
  }
62808
+ /**
62809
+ * Abort and dispose every active specify session and reviewer subagent.
62810
+ * Used by the global-pause handler and by `stop()`.
62811
+ *
62812
+ * Reviewer subagents are torn down first so they don't keep streaming
62813
+ * verdicts while the main triage session is being disposed. abort()
62814
+ * interrupts any in-flight LLM stream / tool call; dispose() then
62815
+ * releases session resources.
62816
+ */
62817
+ abortAndDisposeActiveSessions(reason) {
62818
+ for (const taskId of [...this.activeSubagentSessions.keys()]) {
62819
+ this.disposeSubagentsForTask(taskId, reason);
62820
+ }
62821
+ for (const [taskId, session] of this.activeSessions) {
62822
+ planLog.log(`${reason} \u2014 terminating triage session for ${taskId}`);
62823
+ this.pauseAborted.add(taskId);
62824
+ this.options.stuckTaskDetector?.untrackTask(taskId);
62825
+ const sessionWithAbort = session;
62826
+ if (typeof sessionWithAbort.abort === "function") {
62827
+ void sessionWithAbort.abort().catch((err) => {
62828
+ planLog.warn(`Failed to abort triage session for ${taskId}: ${err}`);
62829
+ });
62830
+ }
62831
+ session.dispose();
62832
+ }
62833
+ }
62709
62834
  /**
62710
62835
  * Mark a task as stuck-aborted so the catch block knows not to treat
62711
62836
  * the disposed session as a genuine failure.
@@ -62999,6 +63124,17 @@ Write the PROMPT.md directly using the write tool, then call \`fn_review_spec()\
62999
63124
  resolveAgentPrompt("triage", settings.agentPrompts) || (isFast ? FAST_TRIAGE_SYSTEM_PROMPT : TRIAGE_SYSTEM_PROMPT),
63000
63125
  triageInstructions
63001
63126
  );
63127
+ const triageContributions = this.options.pluginRunner?.getPromptContributionsForSurface("triage") ?? [];
63128
+ if (triageContributions.length > 0) {
63129
+ planLog.log(`${task.id}: applied ${triageContributions.length} plugin prompt contributions for triage surface`);
63130
+ }
63131
+ const triagePluginContributions = buildPluginPromptSection(
63132
+ "triage",
63133
+ this.options.pluginRunner
63134
+ );
63135
+ const triageSystemPromptFinal = triagePluginContributions ? `${triageSystemPrompt}
63136
+
63137
+ ${triagePluginContributions}` : triageSystemPrompt;
63002
63138
  const skillContext = await buildSessionSkillContext({
63003
63139
  agentStore: this.options.agentStore,
63004
63140
  task,
@@ -63011,7 +63147,7 @@ Write the PROMPT.md directly using the write tool, then call \`fn_review_spec()\
63011
63147
  runtimeHint: triageRuntimeHint,
63012
63148
  pluginRunner: this.options.pluginRunner,
63013
63149
  cwd: this.rootDir,
63014
- systemPrompt: triageSystemPrompt,
63150
+ systemPrompt: triageSystemPromptFinal,
63015
63151
  tools: "coding",
63016
63152
  customTools,
63017
63153
  onText: agentLogger.onText,
@@ -63175,7 +63311,7 @@ Write the PROMPT.md directly using the write tool, then call \`fn_review_spec()\
63175
63311
  runtimeHint: triageRuntimeHint,
63176
63312
  pluginRunner: this.options.pluginRunner,
63177
63313
  cwd: this.rootDir,
63178
- systemPrompt: triageSystemPrompt,
63314
+ systemPrompt: triageSystemPromptFinal,
63179
63315
  tools: "coding",
63180
63316
  customTools,
63181
63317
  onText: agentLogger.onText,
@@ -64749,6 +64885,7 @@ Do not refactor, rename broadly, or make opportunistic improvements.
64749
64885
  taskTitle: taskForSkillContext?.title
64750
64886
  })
64751
64887
  });
64888
+ options.onSession?.(session);
64752
64889
  const runId = mergeRunContext?.runId;
64753
64890
  const agentId = mergeRunContext?.agentId ?? "merger";
64754
64891
  await store.logEntry(
@@ -64849,50 +64986,322 @@ function resetMergeWithWarn(rootDir, taskId, label) {
64849
64986
  mergerLog.warn(`${taskId}: git reset --merge cleanup failed during ${label}: ${msg}`);
64850
64987
  }
64851
64988
  }
64852
- async function stashUnrelatedRootDirChanges(rootDir, taskId) {
64989
+ async function listOrphanedAutostashes(rootDir) {
64853
64990
  try {
64854
- const dirty = await snapshotDirtyFiles(rootDir);
64855
- if (dirty.size === 0) return null;
64856
- const label = `fusion-merger-autostash:${taskId}:${Date.now()}`;
64857
- await execAsync2(
64858
- `git stash push -u -m "${label}"`,
64859
- { cwd: rootDir }
64860
- );
64861
64991
  const { stdout } = await execAsync2(
64862
- `git stash list --format="%gd %s"`,
64992
+ `git stash list --format="%H %gd %s"`,
64863
64993
  { cwd: rootDir, encoding: "utf-8" }
64864
64994
  );
64865
- const lines = String(stdout).split("\n");
64866
- const match = lines.find((line) => line.includes(label));
64867
- if (!match) {
64995
+ const lines = String(stdout).split("\n").map((l) => l.trim()).filter(Boolean);
64996
+ const orphans = [];
64997
+ for (const line of lines) {
64998
+ const idx = line.indexOf(AUTOSTASH_LABEL_PREFIX);
64999
+ if (idx === -1) continue;
65000
+ const parts = line.split(/\s+/);
65001
+ const sha = parts[0] ?? "";
65002
+ const ref = parts[1] ?? "";
65003
+ const label = line.slice(idx);
65004
+ if (sha && ref) orphans.push({ sha, ref, label });
65005
+ }
65006
+ return orphans;
65007
+ } catch {
65008
+ return [];
65009
+ }
65010
+ }
65011
+ async function stashUnrelatedRootDirChanges(rootDir, taskId) {
65012
+ try {
65013
+ const orphans = await listOrphanedAutostashes(rootDir);
65014
+ if (orphans.length > 0) {
65015
+ const refs = orphans.map((o) => `${o.ref}@${o.sha.slice(0, 7)}`).join(", ");
64868
65016
  mergerLog.warn(
64869
- `${taskId}: created autostash but could not locate it in stash list \u2014 leaving in place to avoid data loss`
65017
+ `${taskId}: ${orphans.length} orphaned fusion-merger-autostash entry(ies) in stash list (${refs}) \u2014 these are uncommitted dev changes from prior merges whose restore failed. Recover with: cd ${rootDir} && git stash list && git stash apply <sha>`
64870
65018
  );
65019
+ }
65020
+ } catch {
65021
+ }
65022
+ try {
65023
+ const dirty = await snapshotDirtyFiles(rootDir);
65024
+ if (dirty.size === 0) return null;
65025
+ const label = `${AUTOSTASH_LABEL_PREFIX}${taskId}:${Date.now()}`;
65026
+ await execAsync2("git add -A", { cwd: rootDir });
65027
+ const { stdout: createOut } = await execAsync2("git stash create", {
65028
+ cwd: rootDir,
65029
+ encoding: "utf-8"
65030
+ });
65031
+ const sha = String(createOut).trim();
65032
+ if (!sha) {
65033
+ await execAsync2("git reset", { cwd: rootDir }).catch(() => void 0);
64871
65034
  return null;
64872
65035
  }
64873
- const ref = match.split(/\s+/)[0] ?? null;
65036
+ await execAsync2(
65037
+ `git stash store -m ${quoteArg(label)} ${sha}`,
65038
+ { cwd: rootDir }
65039
+ );
65040
+ await execAsync2("git reset --hard HEAD", { cwd: rootDir });
65041
+ await execAsync2("git clean -fd", { cwd: rootDir });
64874
65042
  mergerLog.log(
64875
- `${taskId}: stashed ${dirty.size} unrelated dirty path(s) in rootDir as ${ref} (${label})`
65043
+ `${taskId}: stashed ${dirty.size} unrelated dirty path(s) in rootDir as ${sha.slice(0, 7)} (${label})`
64876
65044
  );
64877
- return ref;
65045
+ return { sha, label };
64878
65046
  } catch (err) {
64879
65047
  const msg = err instanceof Error ? err.message : String(err);
64880
65048
  mergerLog.warn(
64881
65049
  `${taskId}: pre-merge autostash failed (${msg}) \u2014 proceeding without stash; concurrent dev edits in rootDir may be wiped`
64882
65050
  );
65051
+ try {
65052
+ await execAsync2("git reset", { cwd: rootDir });
65053
+ } catch {
65054
+ }
64883
65055
  return null;
64884
65056
  }
64885
65057
  }
64886
- async function restoreUnrelatedRootDirChanges(rootDir, taskId, stashRef) {
65058
+ async function findStashRefBySha(rootDir, sha) {
64887
65059
  try {
64888
- await execAsync2(`git stash pop "${stashRef}"`, { cwd: rootDir });
64889
- mergerLog.log(`${taskId}: restored autostash ${stashRef}`);
65060
+ const { stdout } = await execAsync2(
65061
+ `git stash list --format="%H %gd"`,
65062
+ { cwd: rootDir, encoding: "utf-8" }
65063
+ );
65064
+ for (const line of String(stdout).split("\n")) {
65065
+ const trimmed = line.trim();
65066
+ if (!trimmed) continue;
65067
+ const [entrySha, ref] = trimmed.split(/\s+/);
65068
+ if (entrySha === sha && ref) return ref;
65069
+ }
65070
+ return null;
65071
+ } catch {
65072
+ return null;
65073
+ }
65074
+ }
65075
+ async function dropAutostashBySha(rootDir, taskId, sha) {
65076
+ const ref = await findStashRefBySha(rootDir, sha);
65077
+ if (!ref) {
65078
+ mergerLog.log(`${taskId}: autostash ${sha.slice(0, 7)} no longer in stash list (already dropped)`);
65079
+ return;
65080
+ }
65081
+ try {
65082
+ await execAsync2(`git stash drop ${ref}`, { cwd: rootDir });
64890
65083
  } catch (err) {
64891
65084
  const msg = err instanceof Error ? err.message : String(err);
65085
+ mergerLog.warn(`${taskId}: failed to drop autostash ${ref} (${msg}) \u2014 harmless, will linger in stash list`);
65086
+ }
65087
+ }
65088
+ async function runAiAgentForAutostashConflict(params) {
65089
+ const { store, rootDir, taskId, conflictedFiles, options, settings } = params;
65090
+ const agentLogger = new AgentLogger({
65091
+ store,
65092
+ taskId,
65093
+ agent: "merger",
65094
+ persistAgentToolOutput: settings.persistAgentToolOutput,
65095
+ onAgentText: options.onAgentText ? (_id, delta) => options.onAgentText(delta) : void 0,
65096
+ onAgentTool: options.onAgentTool ? (_id, name) => options.onAgentTool(name) : void 0
65097
+ });
65098
+ let taskForSkillContext = null;
65099
+ let skillContext = void 0;
65100
+ if (options.agentStore) {
65101
+ try {
65102
+ taskForSkillContext = await store.getTask(taskId);
65103
+ skillContext = await buildSessionSkillContext({
65104
+ agentStore: options.agentStore,
65105
+ task: taskForSkillContext,
65106
+ sessionPurpose: "merger",
65107
+ projectRootDir: rootDir,
65108
+ pluginRunner: options.pluginRunner
65109
+ });
65110
+ } catch {
65111
+ }
65112
+ }
65113
+ const assignedAgentId = taskForSkillContext?.assignedAgentId?.trim();
65114
+ const agentStoreWithGetAgent = options.agentStore && typeof options.agentStore.getAgent === "function" ? options.agentStore : null;
65115
+ const assignedAgent = assignedAgentId && agentStoreWithGetAgent ? await agentStoreWithGetAgent.getAgent(assignedAgentId).catch(() => null) : null;
65116
+ const mergerRuntimeHint = extractRuntimeHint(assignedAgent?.runtimeConfig);
65117
+ const systemPrompt = `You are an autostash-conflict resolution agent running after a Fusion merge has already committed on the main branch.
65118
+
65119
+ Before the merge ran, the developer had uncommitted local changes in their working tree. The merger snapshotted those changes into a git stash, ran the merge cleanly, and is now reapplying the stash on top of the merged HEAD. The reapply hit conflicts because the merge committed changes that overlap the developer's stashed edits.
65120
+
65121
+ ## Your job
65122
+ Edit the conflicted files in place to remove every conflict marker (\`<<<<<<<\`, \`=======\`, \`>>>>>>>\`) and produce a coherent merged result that:
65123
+ - Preserves the developer's intended uncommitted changes (the "Updated upstream" / branch-side, depending on which side the stash pop wrote)
65124
+ - Layers them onto the merged HEAD content (the other side)
65125
+
65126
+ ## Rules
65127
+ 1. Read each conflicted file carefully before editing
65128
+ 2. Resolve every conflict marker \u2014 none may remain after you finish
65129
+ 3. Do NOT make any git commits. Do NOT run \`git add\` or \`git stash drop\`. Just edit the files.
65130
+ 4. Do NOT touch files that are not in the conflicted-files list
65131
+ 5. If you genuinely cannot determine the right resolution for a hunk, prefer the developer's stashed edits (their work is the unsaved context) and add a brief \`// TODO(autostash-conflict)\` comment so they can review
65132
+
65133
+ The orchestrator will verify post-run that no conflict markers remain. If any do, this attempt is treated as a failure and the stash is left intact for manual recovery.`;
65134
+ const fileList = conflictedFiles.map((f) => `- ${f}`).join("\n");
65135
+ const prompt = `Resolve autostash apply conflicts for task ${taskId}.
65136
+
65137
+ ## Conflicted files
65138
+ ${fileList}
65139
+
65140
+ ## Steps
65141
+ 1. For each file above, read its current contents (it has conflict markers from the failed \`git stash apply\`)
65142
+ 2. Edit it to a clean state with no conflict markers \u2014 preserving the developer's intended changes layered on top of the merged HEAD
65143
+ 3. After all files are clean, you are done. Do NOT commit or run git stash commands.`;
65144
+ mergerLog.log(`${taskId}: starting autostash-conflict resolution agent (${conflictedFiles.length} file(s))`);
65145
+ const { session } = await createResolvedAgentSession({
65146
+ sessionPurpose: "merger",
65147
+ runtimeHint: mergerRuntimeHint,
65148
+ pluginRunner: options.pluginRunner,
65149
+ cwd: rootDir,
65150
+ systemPrompt,
65151
+ tools: "coding",
65152
+ onText: agentLogger.onText,
65153
+ onThinking: agentLogger.onThinking,
65154
+ onToolStart: agentLogger.onToolStart,
65155
+ onToolEnd: agentLogger.onToolEnd,
65156
+ defaultProvider: settings.defaultProviderOverride && settings.defaultModelIdOverride ? settings.defaultProviderOverride : settings.defaultProvider,
65157
+ defaultModelId: settings.defaultProviderOverride && settings.defaultModelIdOverride ? settings.defaultModelIdOverride : settings.defaultModelId,
65158
+ fallbackProvider: settings.fallbackProvider,
65159
+ fallbackModelId: settings.fallbackModelId,
65160
+ defaultThinkingLevel: settings.defaultThinkingLevel,
65161
+ ...skillContext?.skillSelectionContext ? { skillSelection: skillContext.skillSelectionContext } : {},
65162
+ taskId,
65163
+ taskTitle: taskForSkillContext?.title,
65164
+ onFallbackModelUsed: createFallbackModelObserver({
65165
+ agent: "merger",
65166
+ label: "autostash conflict agent",
65167
+ store,
65168
+ taskId,
65169
+ taskTitle: taskForSkillContext?.title
65170
+ })
65171
+ });
65172
+ options.onSession?.(session);
65173
+ try {
65174
+ await store.appendAgentLog(
65175
+ taskId,
65176
+ `Autostash conflict agent started (model: ${describeModel(session)}, files: ${conflictedFiles.length})`,
65177
+ "text",
65178
+ void 0,
65179
+ "merger"
65180
+ );
65181
+ await withRateLimitRetry(async () => {
65182
+ throwIfAborted(options.signal, taskId);
65183
+ await promptWithFallback(session, prompt);
65184
+ checkSessionError(session);
65185
+ }, {
65186
+ onRetry: (attempt, delayMs, error) => {
65187
+ const delaySec = Math.round(delayMs / 1e3);
65188
+ mergerLog.warn(`\u23F3 ${taskId} autostash-conflict agent rate limited \u2014 retry ${attempt} in ${delaySec}s: ${error.message}`);
65189
+ },
65190
+ signal: options.signal
65191
+ });
65192
+ return { success: true };
65193
+ } catch (err) {
65194
+ const msg = err instanceof Error ? err.message : String(err);
65195
+ mergerLog.warn(`${taskId}: autostash-conflict agent error: ${msg}`);
65196
+ await store.logEntry(taskId, "Autostash conflict agent encountered an error", msg);
65197
+ return { success: false, error: msg };
65198
+ } finally {
65199
+ try {
65200
+ session.dispose();
65201
+ } catch {
65202
+ }
65203
+ }
65204
+ }
65205
+ async function findFilesWithConflictMarkers(rootDir, files) {
65206
+ const stillConflicted = [];
65207
+ for (const file of files) {
65208
+ try {
65209
+ const fullPath = join30(rootDir, file);
65210
+ if (!existsSync24(fullPath)) continue;
65211
+ const { stdout } = await execAsync2(
65212
+ `git grep -l -e "^<<<<<<< " -e "^=======$" -e "^>>>>>>> " --no-index -- ${quoteArg(fullPath)}`,
65213
+ { cwd: rootDir, encoding: "utf-8" }
65214
+ ).catch(() => ({ stdout: "" }));
65215
+ if (String(stdout).trim()) stillConflicted.push(file);
65216
+ } catch {
65217
+ }
65218
+ }
65219
+ return stillConflicted;
65220
+ }
65221
+ async function restoreUnrelatedRootDirChanges(rootDir, taskId, handle, ctx) {
65222
+ const { sha } = handle;
65223
+ let applyConflicted = false;
65224
+ try {
65225
+ await execAsync2(`git stash apply ${sha}`, { cwd: rootDir });
65226
+ } catch (err) {
65227
+ const msg = err instanceof Error ? err.message : String(err);
65228
+ const conflicted = await getConflictedFiles(rootDir);
65229
+ if (conflicted.length === 0) {
65230
+ mergerLog.warn(
65231
+ `${taskId}: failed to apply autostash ${sha.slice(0, 7)} (${msg}) \u2014 stash left intact; recover with: cd ${rootDir} && git stash apply ${sha}`
65232
+ );
65233
+ return { status: "failed", stashSha: sha, errorMessage: msg };
65234
+ }
65235
+ applyConflicted = true;
64892
65236
  mergerLog.warn(
64893
- `${taskId}: failed to pop autostash ${stashRef} (${msg}) \u2014 stash left intact; recover with: cd ${rootDir} && git stash list && git stash pop ${stashRef}`
65237
+ `${taskId}: autostash apply hit conflict in ${conflicted.length} file(s): ${conflicted.join(", ")}`
64894
65238
  );
64895
65239
  }
65240
+ if (!applyConflicted) {
65241
+ mergerLog.log(`${taskId}: restored autostash ${sha.slice(0, 7)} cleanly`);
65242
+ await dropAutostashBySha(rootDir, taskId, sha);
65243
+ return { status: "restored", stashSha: sha };
65244
+ }
65245
+ const conflictedFiles = await getConflictedFiles(rootDir);
65246
+ const smartConflictResolution = (ctx.settings.smartConflictResolution ?? ctx.settings.autoResolveConflicts) !== false;
65247
+ if (!smartConflictResolution) {
65248
+ const message = `Autostash apply conflicted in ${conflictedFiles.length} file(s) and smartConflictResolution is disabled. Stash ${sha.slice(0, 7)} left intact; resolve manually with: cd ${rootDir} && # edit files, then git stash drop <ref>`;
65249
+ mergerLog.warn(`${taskId}: ${message}`);
65250
+ return {
65251
+ status: "conflict-needs-manual",
65252
+ stashSha: sha,
65253
+ conflictedFiles,
65254
+ message
65255
+ };
65256
+ }
65257
+ await ctx.store.logEntry(
65258
+ taskId,
65259
+ `Autostash apply conflicted in ${conflictedFiles.length} file(s) \u2014 invoking AI to resolve`,
65260
+ conflictedFiles.join("\n")
65261
+ );
65262
+ const aiResult = await runAiAgentForAutostashConflict({
65263
+ store: ctx.store,
65264
+ rootDir,
65265
+ taskId,
65266
+ conflictedFiles,
65267
+ options: ctx.options,
65268
+ settings: ctx.settings
65269
+ });
65270
+ if (!aiResult.success) {
65271
+ const message = `Autostash apply conflict, AI resolution failed (${aiResult.error ?? "unknown error"}). Stash ${sha.slice(0, 7)} left intact; recover with: cd ${rootDir} && git status (conflicts in working tree) && # resolve, then git stash drop <ref>`;
65272
+ mergerLog.warn(`${taskId}: ${message}`);
65273
+ return {
65274
+ status: "conflict-needs-manual",
65275
+ stashSha: sha,
65276
+ conflictedFiles,
65277
+ message
65278
+ };
65279
+ }
65280
+ const stillConflicted = await findFilesWithConflictMarkers(rootDir, conflictedFiles);
65281
+ if (stillConflicted.length > 0) {
65282
+ const message = `AI agent reported success but conflict markers remain in: ${stillConflicted.join(", ")}. Stash ${sha.slice(0, 7)} left intact; recover manually.`;
65283
+ mergerLog.warn(`${taskId}: ${message}`);
65284
+ return {
65285
+ status: "conflict-needs-manual",
65286
+ stashSha: sha,
65287
+ conflictedFiles: stillConflicted,
65288
+ message
65289
+ };
65290
+ }
65291
+ mergerLog.log(
65292
+ `${taskId}: AI-resolved autostash conflict in ${conflictedFiles.length} file(s); dropping stash ${sha.slice(0, 7)}`
65293
+ );
65294
+ await ctx.store.logEntry(
65295
+ taskId,
65296
+ `Autostash conflict resolved by AI in ${conflictedFiles.length} file(s)`,
65297
+ conflictedFiles.join("\n")
65298
+ );
65299
+ await dropAutostashBySha(rootDir, taskId, sha);
65300
+ return {
65301
+ status: "ai-resolved",
65302
+ stashSha: sha,
65303
+ conflictedFiles
65304
+ };
64896
65305
  }
64897
65306
  async function generateAiMergeSummary(commitLog, diffStat, settings, rootDir) {
64898
65307
  try {
@@ -65661,6 +66070,7 @@ You are assisting with a paused \`git pull --rebase\`.
65661
66070
  taskId
65662
66071
  })
65663
66072
  });
66073
+ options?.onSession?.(session);
65664
66074
  const prompt = [
65665
66075
  `Resolve rebase conflicts for task ${taskId}.`,
65666
66076
  "",
@@ -65874,7 +66284,8 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
65874
66284
  if (mergeBlocker) {
65875
66285
  throw new Error(`Cannot merge ${taskId}: ${mergeBlocker}`);
65876
66286
  }
65877
- const autostashRef = await stashUnrelatedRootDirChanges(rootDir, taskId);
66287
+ const autostashHandle = await stashUnrelatedRootDirChanges(rootDir, taskId);
66288
+ let resultForFinally;
65878
66289
  try {
65879
66290
  const branch = task.branch || `fusion/${taskId.toLowerCase()}`;
65880
66291
  const sourceIssueRef = buildSourceIssueRef(task.sourceIssue);
@@ -65886,6 +66297,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
65886
66297
  worktreeRemoved: false,
65887
66298
  branchDeleted: false
65888
66299
  };
66300
+ resultForFinally = result;
65889
66301
  const mergeRunId = generateSyntheticRunId("merge", taskId);
65890
66302
  const engineRunContext = {
65891
66303
  runId: mergeRunId,
@@ -66840,7 +67252,8 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
66840
67252
  const pushResult = await pushToRemoteAfterMerge(store, rootDir, taskId, settings, {
66841
67253
  onAgentText: options.onAgentText,
66842
67254
  signal: options.signal,
66843
- runtimeHint: pushRuntimeHint
67255
+ runtimeHint: pushRuntimeHint,
67256
+ onSession: options.onSession
66844
67257
  });
66845
67258
  if (pushResult.pushed) {
66846
67259
  mergerLog.log(`${taskId}: pushed merged result to remote`);
@@ -66892,8 +67305,29 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
66892
67305
  await completeTask(store, taskId, result);
66893
67306
  return result;
66894
67307
  } finally {
66895
- if (autostashRef) {
66896
- await restoreUnrelatedRootDirChanges(rootDir, taskId, autostashRef);
67308
+ if (autostashHandle) {
67309
+ try {
67310
+ const settings = await store.getSettings();
67311
+ const outcome = await restoreUnrelatedRootDirChanges(
67312
+ rootDir,
67313
+ taskId,
67314
+ autostashHandle,
67315
+ { store, options, settings }
67316
+ );
67317
+ if (resultForFinally) {
67318
+ resultForFinally.autostash = outcome;
67319
+ }
67320
+ } catch (err) {
67321
+ const msg = err instanceof Error ? err.message : String(err);
67322
+ mergerLog.warn(`${taskId}: autostash restore threw unexpectedly (${msg}) \u2014 stash may be left in place; check git stash list`);
67323
+ if (resultForFinally) {
67324
+ resultForFinally.autostash = {
67325
+ status: "failed",
67326
+ stashSha: autostashHandle.sha,
67327
+ errorMessage: msg
67328
+ };
67329
+ }
67330
+ }
66897
67331
  }
66898
67332
  }
66899
67333
  }
@@ -67944,7 +68378,7 @@ async function completeTask(store, taskId, result) {
67944
68378
  result.task = task;
67945
68379
  store.emit("task:merged", result);
67946
68380
  }
67947
- var execAsync2, execFileAsync, LOCKFILE_PATTERNS, GENERATED_PATTERNS, DEPENDENCY_SYNC_TRIGGER_PATTERNS, WORKFLOW_SCRIPT_OUTPUT_MAX_CHARS, PULL_REBASE_TIMEOUT_MS, PUSH_TIMEOUT_MS, MERGE_COMMIT_LOG_MAX_CHARS, MERGE_DIFF_STAT_MAX_CHARS, VerificationError, MergeAbortedError, VERIFICATION_EXTRA_ENV, FUSION_TASK_ID_TRAILER_KEY;
68381
+ var execAsync2, execFileAsync, LOCKFILE_PATTERNS, GENERATED_PATTERNS, DEPENDENCY_SYNC_TRIGGER_PATTERNS, WORKFLOW_SCRIPT_OUTPUT_MAX_CHARS, PULL_REBASE_TIMEOUT_MS, PUSH_TIMEOUT_MS, MERGE_COMMIT_LOG_MAX_CHARS, MERGE_DIFF_STAT_MAX_CHARS, VerificationError, MergeAbortedError, VERIFICATION_EXTRA_ENV, AUTOSTASH_LABEL_PREFIX, FUSION_TASK_ID_TRAILER_KEY;
67948
68382
  var init_merger = __esm({
67949
68383
  "../engine/src/merger.ts"() {
67950
68384
  "use strict";
@@ -68026,6 +68460,7 @@ var init_merger = __esm({
68026
68460
  ["FUSION_TEST_WORKSPACE_CONCURRENCY", "4"]
68027
68461
  ].filter(([key]) => !(key in process.env))
68028
68462
  );
68463
+ AUTOSTASH_LABEL_PREFIX = "fusion-merger-autostash:";
68029
68464
  FUSION_TASK_ID_TRAILER_KEY = "Fusion-Task-Id";
68030
68465
  }
68031
68466
  });
@@ -69950,7 +70385,7 @@ function buildSourceIssueRef2(sourceIssue) {
69950
70385
  }
69951
70386
  return `${sourceIssue.repository}#${issueNumber}`;
69952
70387
  }
69953
- function buildExecutionPrompt(task, rootDir, settings, worktreePath) {
70388
+ function buildExecutionPrompt(task, rootDir, settings, worktreePath, pluginRunner) {
69954
70389
  const prompt = scopePromptToWorktree2(task.prompt, rootDir, worktreePath);
69955
70390
  const reviewMatch = prompt.match(/##\s*Review Level[:\s]*(\d)/);
69956
70391
  const reviewLevel = reviewMatch ? parseInt(reviewMatch[1], 10) : 0;
@@ -70023,6 +70458,11 @@ git log --oneline
70023
70458
  }
70024
70459
  steeringSection = lines.join("\n");
70025
70460
  }
70461
+ const taskPromptContributions = pluginRunner?.getPromptContributionsForSurface("executor-task") ?? [];
70462
+ if (taskPromptContributions.length > 0) {
70463
+ executorLog.log(`${task.id}: applied ${taskPromptContributions.length} plugin prompt contributions for executor-task surface`);
70464
+ }
70465
+ const pluginTaskContributions = buildPluginPromptSection("executor-task", pluginRunner);
70026
70466
  return `Execute this task.
70027
70467
 
70028
70468
  ## Task: ${task.id}
@@ -70041,6 +70481,10 @@ ${reviewLevel >= 1 ? `Before implementing each step (except Step 0 and the final
70041
70481
  ${reviewLevel >= 2 ? `After implementing + committing each step, call:
70042
70482
  \`fn_review_step(step=N, type="code", step_name="...", baseline="<SHA from before step>")\`` : ""}
70043
70483
  ${reviewLevel >= 3 ? `After tests, also call fn_review_step with type="code" for test review.` : ""}
70484
+ ${pluginTaskContributions ? `
70485
+
70486
+ ${pluginTaskContributions}
70487
+ ` : ""}
70044
70488
 
70045
70489
  ## Worktree Boundaries
70046
70490
 
@@ -71417,6 +71861,41 @@ The tool prevents your session from being killed by the inactivity watchdog duri
71417
71861
  return false;
71418
71862
  }
71419
71863
  }
71864
+ /**
71865
+ * Returns true when execute() should be deferred because the agent bound to
71866
+ * this task has an active heartbeat run and allowParallelExecution=false.
71867
+ *
71868
+ * Only applies to permanent (non-ephemeral) agents. Always returns false
71869
+ * when agentStore is unavailable or the agent cannot be resolved.
71870
+ */
71871
+ async shouldDeferForHeartbeat(agentId) {
71872
+ if (!this.options.agentStore) return false;
71873
+ const agent = await this.options.agentStore.getAgent(agentId).catch(() => null);
71874
+ if (!agent) return false;
71875
+ if (isEphemeralAgent(agent)) return false;
71876
+ const rc = agent.runtimeConfig ?? {};
71877
+ if (rc.allowParallelExecution !== false) return false;
71878
+ const activeRun = await this.options.agentStore.getActiveHeartbeatRun(agentId).catch(() => null);
71879
+ return activeRun !== null;
71880
+ }
71881
+ /**
71882
+ * Re-dispatch execute() for any unstarted in-progress task belonging to the
71883
+ * given agent. Called after a heartbeat run completes to unblock tasks that
71884
+ * were deferred by the allowParallelExecution=false gate.
71885
+ */
71886
+ async resumeTaskForAgent(agentId) {
71887
+ const settings = await this.store.getSettings();
71888
+ if (settings.globalPause || settings.enginePaused) return;
71889
+ const tasks = await this.store.listTasks({ slim: true, column: "in-progress" });
71890
+ for (const task of tasks) {
71891
+ if (task.assignedAgentId === agentId && !task.paused && !this.executing.has(task.id) && !this.activeSessions.has(task.id) && !this.activeStepExecutors.has(task.id) && !this.activeWorkflowStepSessions.has(task.id)) {
71892
+ executorLog.log(`${task.id}: re-dispatching execute() after heartbeat completion for agent ${agentId}`);
71893
+ this.execute(task).catch(
71894
+ (err) => executorLog.error(`Failed to resume ${task.id} after heartbeat completion:`, err)
71895
+ );
71896
+ }
71897
+ }
71898
+ }
71420
71899
  /**
71421
71900
  * Resume orphaned in-progress tasks (e.g., after crash/restart).
71422
71901
  * Call once after engine startup.
@@ -71508,28 +71987,6 @@ The tool prevents your session from being killed by the inactivity watchdog duri
71508
71987
  }
71509
71988
  return "";
71510
71989
  }
71511
- resolveDependencyWorktree(task, allTasks) {
71512
- if (task.dependencies.length === 0) return null;
71513
- for (const depId of task.dependencies) {
71514
- const dep = allTasks.find((t) => t.id === depId);
71515
- if (dep && dep.worktree && (dep.column === "done" || dep.column === "in-review") && existsSync29(dep.worktree)) {
71516
- return dep.worktree;
71517
- }
71518
- }
71519
- return null;
71520
- }
71521
- /**
71522
- * Reuse an existing worktree directory from a dependency task.
71523
- * Instead of creating a new worktree with `git worktree add`, this creates
71524
- * a new branch in the existing worktree via `git checkout -b`. The worktree
71525
- * directory (and its build caches) are preserved.
71526
- */
71527
- async reuseWorktree(branch, worktreePath) {
71528
- await execAsync5(`git checkout -b "${branch}"`, {
71529
- cwd: worktreePath
71530
- });
71531
- executorLog.log(`Reused worktree at ${worktreePath}, created branch ${branch}`);
71532
- }
71533
71990
  /**
71534
71991
  * Execute a task in an isolated git worktree.
71535
71992
  *
@@ -71543,6 +72000,11 @@ The tool prevents your session from being killed by the inactivity watchdog duri
71543
72000
  async execute(task) {
71544
72001
  executorLog.log(`execute() called for ${task.id} (already executing=${this.executing.has(task.id)})`);
71545
72002
  if (this.executing.has(task.id)) return;
72003
+ const assignedAgentId = task.assignedAgentId;
72004
+ if (assignedAgentId && await this.shouldDeferForHeartbeat(assignedAgentId)) {
72005
+ executorLog.log(`${task.id}: skipping execute \u2014 agent ${assignedAgentId} has active heartbeat run (allowParallelExecution=false)`);
72006
+ return;
72007
+ }
71546
72008
  this.executing.add(task.id);
71547
72009
  executorLog.log(`Starting ${task.id}: ${task.title || task.description.slice(0, 60)}`);
71548
72010
  const settings = await this.store.getSettings();
@@ -72132,9 +72594,9 @@ ${summary}`,
72132
72594
  const sessionRef = { current: null };
72133
72595
  const stepCheckpoints = /* @__PURE__ */ new Map();
72134
72596
  const stuckDetector = this.options.stuckTaskDetector;
72135
- const assignedAgentId = detail.assignedAgentId?.trim();
72136
- const reflectionTools = this.options.reflectionService && settings.reflectionEnabled && assignedAgentId ? [createReflectOnPerformanceTool(this.options.reflectionService, assignedAgentId)] : [];
72137
- const assignedAgent = assignedAgentId && this.options.agentStore ? await this.options.agentStore.getAgent(assignedAgentId).catch(() => null) : null;
72597
+ const assignedAgentId2 = detail.assignedAgentId?.trim();
72598
+ const reflectionTools = this.options.reflectionService && settings.reflectionEnabled && assignedAgentId2 ? [createReflectOnPerformanceTool(this.options.reflectionService, assignedAgentId2)] : [];
72599
+ const assignedAgent = assignedAgentId2 && this.options.agentStore ? await this.options.agentStore.getAgent(assignedAgentId2).catch(() => null) : null;
72138
72600
  const executorRuntimeHint = extractRuntimeHint(assignedAgent?.runtimeConfig);
72139
72601
  if (executionMode === "fast") {
72140
72602
  executorLog.log(`${task.id}: fast mode \u2014 fn_review_step tool not injected`);
@@ -72183,15 +72645,15 @@ ${summary}`,
72183
72645
  ...this.options.agentStore ? [
72184
72646
  createListAgentsTool(this.options.agentStore),
72185
72647
  createDelegateTaskTool(this.options.agentStore, this.store, { rootDir: this.rootDir }),
72186
- ...assignedAgentId ? [
72187
- createGetAgentConfigTool(this.options.agentStore, assignedAgentId),
72188
- createUpdateAgentConfigTool(this.options.agentStore, assignedAgentId)
72648
+ ...assignedAgentId2 ? [
72649
+ createGetAgentConfigTool(this.options.agentStore, assignedAgentId2),
72650
+ createUpdateAgentConfigTool(this.options.agentStore, assignedAgentId2)
72189
72651
  ] : []
72190
72652
  ] : [],
72191
72653
  // Messaging tools — allows executor agents to send and receive messages.
72192
- ...this.options.messageStore && assignedAgentId ? [
72193
- createSendMessageTool(this.options.messageStore, assignedAgentId),
72194
- createReadMessagesTool(this.options.messageStore, assignedAgentId)
72654
+ ...this.options.messageStore && assignedAgentId2 ? [
72655
+ createSendMessageTool(this.options.messageStore, assignedAgentId2),
72656
+ createReadMessagesTool(this.options.messageStore, assignedAgentId2)
72195
72657
  ] : [],
72196
72658
  // Add plugin tools from PluginRunner
72197
72659
  ...this.options.pluginRunner?.getPluginTools() ?? []
@@ -72229,12 +72691,23 @@ ${summary}`,
72229
72691
  getExecutorSystemPrompt(settings),
72230
72692
  executorInstructions
72231
72693
  );
72694
+ const executorSystemContributions = this.options.pluginRunner?.getPromptContributionsForSurface("executor-system") ?? [];
72695
+ if (executorSystemContributions.length > 0) {
72696
+ executorLog.log(`${task.id}: applied ${executorSystemContributions.length} plugin prompt contributions for executor-system surface`);
72697
+ }
72698
+ const executorPluginContributions = buildPluginPromptSection(
72699
+ "executor-system",
72700
+ this.options.pluginRunner
72701
+ );
72702
+ const executorSystemPromptFinal = executorPluginContributions ? `${executorSystemPrompt}
72703
+
72704
+ ${executorPluginContributions}` : executorSystemPrompt;
72232
72705
  let { session, sessionFile } = await createResolvedAgentSession({
72233
72706
  sessionPurpose: "executor",
72234
72707
  runtimeHint: executorRuntimeHint,
72235
72708
  pluginRunner: this.options.pluginRunner,
72236
72709
  cwd: worktreePath,
72237
- systemPrompt: executorSystemPrompt,
72710
+ systemPrompt: executorSystemPromptFinal,
72238
72711
  tools: "coding",
72239
72712
  customTools,
72240
72713
  onText: agentLogger.onText,
@@ -72295,7 +72768,13 @@ ${summary}`,
72295
72768
  "Review the current state of your worktree and proceed with the next pending step."
72296
72769
  ].join("\n"));
72297
72770
  } else {
72298
- const agentPrompt = buildExecutionPrompt(detail, this.rootDir, settings, worktreePath);
72771
+ const agentPrompt = buildExecutionPrompt(
72772
+ detail,
72773
+ this.rootDir,
72774
+ settings,
72775
+ worktreePath,
72776
+ this.options.pluginRunner
72777
+ );
72299
72778
  await promptWithFallback(session, agentPrompt);
72300
72779
  }
72301
72780
  checkSessionError(session);
@@ -72471,7 +72950,7 @@ ${summary}`,
72471
72950
  runtimeHint: executorRuntimeHint,
72472
72951
  pluginRunner: this.options.pluginRunner,
72473
72952
  cwd: worktreePath,
72474
- systemPrompt: executorSystemPrompt,
72953
+ systemPrompt: executorSystemPromptFinal,
72475
72954
  tools: "coding",
72476
72955
  customTools,
72477
72956
  onText: agentLogger.onText,
@@ -72521,7 +73000,7 @@ ${summary}`,
72521
73000
  "Do NOT ask for permission. Do NOT write a summary. Just call a tool and keep working.",
72522
73001
  "",
72523
73002
  "Original task:",
72524
- buildExecutionPrompt(detail, this.rootDir, settings, worktreePath)
73003
+ buildExecutionPrompt(detail, this.rootDir, settings, worktreePath, this.options.pluginRunner)
72525
73004
  ].join("\n");
72526
73005
  } else {
72527
73006
  retryPrompt = [
@@ -72531,7 +73010,7 @@ ${summary}`,
72531
73010
  "2. If there is remaining work, finish it and then call fn_task_done.",
72532
73011
  "",
72533
73012
  "Original task:",
72534
- buildExecutionPrompt(detail, this.rootDir, settings, worktreePath)
73013
+ buildExecutionPrompt(detail, this.rootDir, settings, worktreePath, this.options.pluginRunner)
72535
73014
  ].join("\n");
72536
73015
  }
72537
73016
  stuckDetector?.recordActivity(task.id);
@@ -73591,7 +74070,7 @@ ${failureContext.output.slice(0, VERIFICATION_LOG_MAX_CHARS)}
73591
74070
  return reRunResult.allPassed;
73592
74071
  } finally {
73593
74072
  await logger2.flush();
73594
- await session.dispose();
74073
+ session.dispose();
73595
74074
  }
73596
74075
  } catch (err) {
73597
74076
  const errorMessage = err instanceof Error ? err.message : String(err);
@@ -74310,7 +74789,7 @@ Review the work done in this worktree and evaluate it against the criteria in yo
74310
74789
  * like `fusion/fn-2729`) used purely for log messages. `depTip` is the
74311
74790
  * resolved SHA of the dep's tip — that's what gets squash-merged.
74312
74791
  */
74313
- async planSquashImportFromDep(taskId, depTip, originalStartPoint) {
74792
+ async planSquashImportFromDep(_taskId, depTip, originalStartPoint) {
74314
74793
  let settings;
74315
74794
  try {
74316
74795
  settings = await this.store.getSettings();
@@ -75199,7 +75678,7 @@ Review the work done in this worktree and evaluate it against the criteria in yo
75199
75678
  this.childSessions.delete(childId);
75200
75679
  }
75201
75680
  try {
75202
- await this.options.agentStore?.updateAgentState(childId, "terminated");
75681
+ await this.options.agentStore?.updateAgentState(childId, "paused");
75203
75682
  } catch (err) {
75204
75683
  const msg = err instanceof Error ? err.message : String(err);
75205
75684
  executorLog.warn(`Failed to update spawned child ${childId} state to 'terminated' during cleanup: ${msg}`);
@@ -78242,9 +78721,6 @@ function taskRelevanceScore(agent, task) {
78242
78721
  }
78243
78722
  return score;
78244
78723
  }
78245
- function isBlockedStateDuplicate(current, previous) {
78246
- return current.blockedBy === previous.blockedBy && current.contextHash === previous.contextHash;
78247
- }
78248
78724
  function truncatePrompt(text, maxChars) {
78249
78725
  if (text.length <= maxChars) return text;
78250
78726
  return `${text.slice(0, maxChars)}
@@ -78304,28 +78780,42 @@ var init_agent_heartbeat = __esm({
78304
78780
 
78305
78781
  ## Your Role
78306
78782
 
78307
- You are a lightweight periodic checker in the broader Fusion system, not the primary implementation agent.
78308
- Your purpose is to keep momentum: detect issues early, surface blockers, and route work to the right place.
78309
- Think in single-pass interventions, not long coding sessions.
78783
+ This is an ambient heartbeat. Task implementation work (coding, running tests, making commits) runs in a separate
78784
+ execution path handled by the executor. Do NOT do task body work or implementation in this heartbeat.
78785
+
78786
+ Your purpose is to keep momentum through coordination: surface blockers, respond to messages, manage memory,
78787
+ delegate, and route work to the right place. Think in single-pass interventions, not coding sessions.
78310
78788
 
78311
78789
  Your job:
78312
- 1. Check your assigned task \u2014 read the description and PROMPT.md if present.
78313
- 2. Do ONE useful action that changes project clarity or flow.
78790
+ 1. Check your assigned task context \u2014 review its state, blockedBy field, and any new comments.
78791
+ 2. Do ONE useful coordination action.
78314
78792
  3. Use fn_task_create to spawn follow-up work, fn_task_log to record observations, and fn_task_document_write for durable artifacts.
78315
78793
  4. Use fn_list_agents + fn_delegate_task when work should be assigned to a specific capable agent now.
78316
78794
  5. Use fn_get_agent_config and fn_update_agent_config to tune direct reports before delegating recurring work.
78317
- 5. Call fn_heartbeat_done when finished with an optional summary of what was accomplished.
78795
+ 6. Call fn_heartbeat_done when finished with an optional summary of what was accomplished.
78318
78796
 
78319
- Examples of ONE useful action:
78320
- - DO: summarize a blocker in fn_task_log with concrete next step(s).
78797
+ **If your bound task is blocked** (blockedBy is set in the task context):
78798
+ - Surface the blocker concretely with fn_task_log.
78799
+ - Chase the dependency: comment on the blocking task, send a message to the responsible agent, or ping an owner.
78800
+ - Look for unblocking work you can spawn or delegate right now.
78801
+ - Pivot to other relevant coordination work if the blocker cannot be immediately resolved.
78802
+
78803
+ **If your bound task is not blocked:**
78804
+ - Surface progress, status, or coordination needs with fn_task_log or fn_task_document_write.
78805
+ - Create follow-up tasks for discovered risks or gaps.
78806
+ - Respond to new steering comments or user messages.
78807
+
78808
+ Examples of ONE useful coordination action:
78809
+ - DO: log a concrete blocker with next steps and message the agent responsible for unblocking.
78321
78810
  - DO: create a focused follow-up task when a missing dependency is discovered.
78322
78811
  - DO: delegate a well-scoped task to an appropriate idle specialist agent.
78323
78812
  - DO: save a short investigation note with fn_task_document_write when the analysis is reusable.
78324
- - DON'T: attempt full implementation, broad refactors, or multi-hour coding.
78813
+ - DON'T: attempt full implementation, run tests, commit code, or do multi-step coding work.
78325
78814
  - DON'T: create vague tasks like "investigate stuff" without actionable scope.
78326
78815
 
78327
- Keep work lightweight \u2014 this is a single-pass check, not a full implementation run.
78328
- You have coding-capable workspace tools (read/write/edit/bash within worktree boundaries) plus fn_task_create, fn_task_log, and fn_task_document tools.
78816
+ Keep work lightweight \u2014 this is a single-pass coordination check, not an implementation run.
78817
+ You have workspace read tools (for context gathering) plus fn_task_create, fn_task_log, fn_task_document tools,
78818
+ fn_send_message, fn_read_messages, fn_list_agents, fn_delegate_task, and memory tools.
78329
78819
 
78330
78820
  **Task Documents:** Save important findings with fn_task_document_write(key="...", content="...").
78331
78821
  Documents persist across sessions and are visible in the dashboard's Documents tab.
@@ -78344,7 +78834,8 @@ Prefer fn_delegate_task when immediate ownership by a specific agent materially
78344
78834
 
78345
78835
  ## Common Patterns
78346
78836
 
78347
- - **Stuck task:** log the concrete blocker, create a narrowly scoped unblocker task if needed, and optionally message the responsible agent.
78837
+ - **Blocked task:** log the concrete blocker, chase the dependency via fn_send_message, create a narrowly scoped unblocker task if needed.
78838
+ - **Stuck task with no blockedBy:** log the observation and create a follow-up task to investigate the root cause.
78348
78839
  - **Completed task with follow-up risk:** create explicit follow-up task(s) for residual risk instead of burying notes in a long log.
78349
78840
  - **New user/agent comments:** summarize what changed, identify required action, and route via task creation/delegation.
78350
78841
  - **Dependency drift:** log the mismatch and create reconciliation tasks with clear dependencies.
@@ -78415,7 +78906,7 @@ You have coding-capable workspace tools (read/write/edit/bash within worktree bo
78415
78906
  Use this decision rule:
78416
78907
  - **fn_task_create:** create executable work when ownership is not predetermined.
78417
78908
  - **fn_delegate_task:** assign immediately when a specific agent should own the work now.
78418
- - **fn_memory_append:** persist durable conventions/pitfalls; avoid transient run-by-run chatter.
78909
+ - **fn_memory_append:** use \`scope="agent"\` for your own operating context and \`scope="project"\` for repo-wide durable knowledge; avoid transient run-by-run chatter.
78419
78910
 
78420
78911
  If unsure who should do the work, prefer fn_task_create and let scheduler routing happen naturally.
78421
78912
 
@@ -78563,10 +79054,71 @@ not loop on the same plan across heartbeats without recording why.`;
78563
79054
  if (this.messageStore) {
78564
79055
  this.messageStore.setMessageToAgentHook(this.handleMessageToAgent.bind(this));
78565
79056
  }
79057
+ void this.reconcileOrphanedRunningAgents();
78566
79058
  this.pollInterval = setInterval(() => {
78567
79059
  void this.checkMissedHeartbeats();
78568
79060
  }, this.pollIntervalMs);
78569
79061
  }
79062
+ /**
79063
+ * Find agents in `state="running"` that are not actually running and flip
79064
+ * them to `"active"`. An agent is considered orphaned when either:
79065
+ * (a) it has no active heartbeat run record, or
79066
+ * (b) it is not in this monitor's in-memory tracked set AND its
79067
+ * lastHeartbeatAt is older than 3× the configured timeout.
79068
+ *
79069
+ * Case (a) covers historical bypass paths (governance-skip, supersede-on-
79070
+ * startRun, safety-net run termination) that ended the run record but
79071
+ * never propagated the agent-state transition. Case (b) covers a process
79072
+ * that crashed mid-run, leaving both the run row and the agent row stuck.
79073
+ *
79074
+ * Called on monitor start AND periodically from the polling loop to keep
79075
+ * the system self-healing across versions. Best-effort — failures are
79076
+ * logged but do not block the caller.
79077
+ */
79078
+ async reconcileOrphanedRunningAgents() {
79079
+ try {
79080
+ const runningAgents = await this.store.listAgents({ state: "running", includeEphemeral: true });
79081
+ const now = Date.now();
79082
+ for (const agent of runningAgents) {
79083
+ let reason = null;
79084
+ const activeRun = await this.store.getActiveHeartbeatRun(agent.id);
79085
+ if (!activeRun) {
79086
+ reason = "no active run";
79087
+ } else if (!this.trackedAgents.has(agent.id)) {
79088
+ const timeoutMs = this.resolveAgentConfig(agent.id).heartbeatTimeoutMs;
79089
+ const lastTs = agent.lastHeartbeatAt ? Date.parse(agent.lastHeartbeatAt) : NaN;
79090
+ const heartbeatAgeMs = Number.isFinite(lastTs) ? Math.max(0, now - lastTs) : Infinity;
79091
+ if (heartbeatAgeMs > timeoutMs * 3) {
79092
+ try {
79093
+ const detail = await this.store.getRunDetail(agent.id, activeRun.id);
79094
+ if (detail && detail.status !== "completed" && detail.status !== "failed" && detail.status !== "terminated") {
79095
+ await this.store.saveRun({
79096
+ ...detail,
79097
+ endedAt: (/* @__PURE__ */ new Date()).toISOString(),
79098
+ status: "terminated",
79099
+ stderrExcerpt: `Reconciled stale run (no heartbeat for ${formatDuration(heartbeatAgeMs)}; threshold ${formatDuration(timeoutMs * 3)})`
79100
+ });
79101
+ }
79102
+ await this.store.endHeartbeatRun(activeRun.id, "terminated");
79103
+ } catch (runEndErr) {
79104
+ heartbeatLog.warn(`Failed to terminate stale run ${activeRun.id} for ${agent.id}: ${runEndErr instanceof Error ? runEndErr.message : String(runEndErr)}`);
79105
+ }
79106
+ reason = `stale heartbeat (${formatDuration(heartbeatAgeMs)} since lastHeartbeatAt)`;
79107
+ }
79108
+ }
79109
+ if (!reason) continue;
79110
+ try {
79111
+ await this.store.updateAgentState(agent.id, "active");
79112
+ this.clearRunState(agent.id);
79113
+ heartbeatLog.log(`Reconciled orphaned running agent ${agent.id} \u2192 active (${reason})`);
79114
+ } catch (err) {
79115
+ heartbeatLog.warn(`Failed to reconcile orphaned running agent ${agent.id}: ${err instanceof Error ? err.message : String(err)}`);
79116
+ }
79117
+ }
79118
+ } catch (err) {
79119
+ heartbeatLog.warn(`reconcileOrphanedRunningAgents scan failed: ${err instanceof Error ? err.message : String(err)}`);
79120
+ }
79121
+ }
78570
79122
  /**
78571
79123
  * Stop the heartbeat monitoring loop.
78572
79124
  * Does not untrack agents - they remain in memory.
@@ -78754,7 +79306,7 @@ not loop on the same plan across heartbeats without recording why.`;
78754
79306
  await this.store.updateAgentState(agentId, "error");
78755
79307
  await this.store.updateAgent(agentId, { lastError: completionResult.stderrExcerpt ?? "Run failed" });
78756
79308
  } else if (completionResult.status === "terminated") {
78757
- await this.store.updateAgentState(agentId, "terminated");
79309
+ await this.store.updateAgentState(agentId, "paused");
78758
79310
  } else {
78759
79311
  await this.store.updateAgentState(agentId, "active");
78760
79312
  }
@@ -78763,6 +79315,9 @@ not loop on the same plan across heartbeats without recording why.`;
78763
79315
  }
78764
79316
  }
78765
79317
  await this.store.endHeartbeatRun(runId, completionResult.status === "completed" ? "completed" : "terminated");
79318
+ if (completionResult.status === "terminated") {
79319
+ this.onTerminated?.(agentId, completionResult.stderrExcerpt ?? "Run terminated");
79320
+ }
78766
79321
  this.onRunCompleted?.(agentId, completedRun);
78767
79322
  }
78768
79323
  /**
@@ -79288,45 +79843,8 @@ not loop on the same plan across heartbeats without recording why.`;
79288
79843
  });
79289
79844
  return await this.store.getRunDetail(agentId, run.id);
79290
79845
  }
79291
- const blockedBy = typeof liveTaskDetail.blockedBy === "string" ? liveTaskDetail.blockedBy.trim() : "";
79292
- const isBlockedTask = liveTaskDetail.status === "queued" && blockedBy.length > 0;
79293
- if (isBlockedTask) {
79294
- const commentCount = (liveTaskDetail.comments?.length ?? 0) + (liveTaskDetail.steeringComments?.length ?? 0);
79295
- const lastCommentId = liveTaskDetail.comments?.at(-1)?.id;
79296
- const lastSteeringCommentId = liveTaskDetail.steeringComments?.at(-1)?.id;
79297
- const contextHash = Buffer.from(
79298
- JSON.stringify({ commentCount, lastCommentId, lastSteeringCommentId, blockedBy })
79299
- ).toString("base64").slice(0, 16);
79300
- const currentBlockedState = {
79301
- taskId: resolvedTaskId2,
79302
- blockedBy,
79303
- recordedAt: (/* @__PURE__ */ new Date()).toISOString(),
79304
- contextHash
79305
- };
79306
- const previousBlockedState = await this.store.getLastBlockedState(agentId);
79307
- if (previousBlockedState && isBlockedStateDuplicate(currentBlockedState, previousBlockedState)) {
79308
- await this.completeRun(agentId, run.id, {
79309
- status: "completed",
79310
- resultJson: { reason: "blocked_duplicate", taskId: resolvedTaskId2, blockedBy }
79311
- });
79312
- return await this.store.getRunDetail(agentId, run.id);
79313
- }
79314
- const blockedMessage = `Task is blocked by ${blockedBy}; waiting for dependency/context changes before retrying.`;
79315
- await taskStore.addComment(resolvedTaskId2, blockedMessage, "agent", void 0, runContext);
79316
- await audit.database({ type: "task:comment:add", target: resolvedTaskId2, metadata: { blockedBy } });
79317
- await this.store.setLastBlockedState(agentId, currentBlockedState);
79318
- heartbeatLog.log(`Task ${resolvedTaskId2} is blocked by ${blockedBy} \u2014 recorded blocked state`);
79319
- await this.completeRun(agentId, run.id, {
79320
- status: "completed",
79321
- resultJson: { reason: "blocked", taskId: resolvedTaskId2, blockedBy }
79322
- });
79323
- return await this.store.getRunDetail(agentId, run.id);
79324
- }
79325
79846
  }
79326
79847
  }
79327
- if (!isNoTaskRun) {
79328
- await this.store.clearLastBlockedState(agentId);
79329
- }
79330
79848
  const STDOUT_EXCERPT_LIMIT = 4e3;
79331
79849
  let outputLength = 0;
79332
79850
  let toolCallCount = 0;
@@ -79432,6 +79950,17 @@ not loop on the same plan across heartbeats without recording why.`;
79432
79950
  baseHeartbeatSystemPrompt,
79433
79951
  [resolvedInstructionsForIdentity, memoryInstructions, selfImprovePrompt].filter((part) => part.trim()).join("\n\n")
79434
79952
  );
79953
+ const heartbeatContributions = this.pluginRunner?.getPromptContributionsForSurface("heartbeat") ?? [];
79954
+ if (heartbeatContributions.length > 0) {
79955
+ heartbeatLog.log(`applied ${heartbeatContributions.length} plugin prompt contributions for heartbeat surface`);
79956
+ }
79957
+ const heartbeatPluginContributions = buildPluginPromptSection(
79958
+ "heartbeat",
79959
+ this.pluginRunner
79960
+ );
79961
+ const systemPromptFinal = heartbeatPluginContributions ? `${systemPrompt}
79962
+
79963
+ ${heartbeatPluginContributions}` : systemPrompt;
79435
79964
  heartbeatTools.push(heartbeatDoneTool);
79436
79965
  if (isNoTaskRun) {
79437
79966
  agentLogger = new AgentLogger({
@@ -79453,7 +79982,7 @@ not loop on the same plan across heartbeats without recording why.`;
79453
79982
  runtimeHint: extractRuntimeHint2(agent.runtimeConfig),
79454
79983
  pluginRunner: this.pluginRunner,
79455
79984
  cwd: rootDir,
79456
- systemPrompt,
79985
+ systemPrompt: systemPromptFinal,
79457
79986
  tools: "coding",
79458
79987
  customTools: heartbeatTools,
79459
79988
  ...(() => {
@@ -79656,7 +80185,7 @@ ${taskDetail.prompt}` : "No PROMPT.md available.",
79656
80185
  try {
79657
80186
  const runWithPrompts = {
79658
80187
  ...run,
79659
- systemPrompt: truncatePrompt(systemPrompt, 1e5),
80188
+ systemPrompt: truncatePrompt(systemPromptFinal, 1e5),
79660
80189
  executionPrompt: truncatePrompt(executionPrompt, 1e5),
79661
80190
  heartbeatProcedureSource: customProcedure ? "custom" : "default"
79662
80191
  };
@@ -79803,8 +80332,6 @@ ${taskDetail.prompt}` : "No PROMPT.md available.",
79803
80332
  let health = "healthy";
79804
80333
  if (report.state === "paused") {
79805
80334
  health = report.pauseReason ? `paused (${report.pauseReason})` : "paused";
79806
- } else if (report.state === "terminated") {
79807
- health = "terminated";
79808
80335
  } else if (report.state === "error") {
79809
80336
  health = "**stuck**";
79810
80337
  } else if (report.state === "running") {
@@ -79819,7 +80346,6 @@ ${taskDetail.prompt}` : "No PROMPT.md available.",
79819
80346
  });
79820
80347
  const hasStuck = rows.some((row) => row.includes("**stuck**"));
79821
80348
  const hasStale = rows.some((row) => row.includes("**stale**"));
79822
- const hasTerminated = rows.some((row) => row.includes("terminated"));
79823
80349
  const actionLines = ["### Actions for Unresponsive Reports"];
79824
80350
  if (hasStuck) {
79825
80351
  actionLines.push("- For **stuck** reports: consider sending a message via fn_send_message asking for status, or reassigning their task via fn_delegate_task to a healthy agent.");
@@ -79827,9 +80353,6 @@ ${taskDetail.prompt}` : "No PROMPT.md available.",
79827
80353
  if (hasStale) {
79828
80354
  actionLines.push("- For **stale** reports: the agent may have lost its heartbeat trigger \u2014 create a follow-up task to investigate.");
79829
80355
  }
79830
- if (hasTerminated) {
79831
- actionLines.push("- For **terminated** reports: if they had active work, reassign their tasks or spawn replacement agents.");
79832
- }
79833
80356
  return [
79834
80357
  "## Reports Health Check",
79835
80358
  "",
@@ -79985,6 +80508,7 @@ ${taskDetail.prompt}` : "No PROMPT.md available.",
79985
80508
  }
79986
80509
  }
79987
80510
  }
80511
+ await this.reconcileOrphanedRunningAgents();
79988
80512
  }
79989
80513
  async handleMissedHeartbeat(tracked, reason) {
79990
80514
  await this.store.recordHeartbeat(tracked.agentId, "missed", tracked.runId);
@@ -79995,12 +80519,21 @@ ${taskDetail.prompt}` : "No PROMPT.md available.",
79995
80519
  const elapsed = now - tracked.lastSeen;
79996
80520
  const reason = `No heartbeat for ${formatDuration(elapsed)} (2\xD7 timeout threshold: ${formatDuration(heartbeatTimeoutMs * 2)})`;
79997
80521
  heartbeatLog.warn(`Recovering unresponsive agent ${tracked.agentId}: ${reason}`);
80522
+ const runIdToTerminate = tracked.runId;
79998
80523
  try {
79999
80524
  tracked.session.dispose();
80000
80525
  } catch (err) {
80001
80526
  heartbeatLog.warn(`Error disposing session for ${tracked.agentId}: ${err instanceof Error ? err.message : String(err)}`);
80002
80527
  }
80003
80528
  this.untrackAgent(tracked.agentId);
80529
+ try {
80530
+ await this.completeRun(tracked.agentId, runIdToTerminate, {
80531
+ status: "terminated",
80532
+ stderrExcerpt: reason
80533
+ });
80534
+ } catch (err) {
80535
+ heartbeatLog.warn(`completeRun(terminated) failed for ${tracked.agentId}/${runIdToTerminate}: ${err instanceof Error ? err.message : String(err)}`);
80536
+ }
80004
80537
  try {
80005
80538
  await this.pauseAgent(tracked.agentId, { pauseReason: "heartbeat-unresponsive", stopActiveRun: false });
80006
80539
  } catch (err) {
@@ -80030,10 +80563,12 @@ ${taskDetail.prompt}` : "No PROMPT.md available.",
80030
80563
  updatedListener = null;
80031
80564
  configRevisionListener = null;
80032
80565
  deletedListener = null;
80033
- constructor(store, callback, taskStore) {
80566
+ isTaskExecuting;
80567
+ constructor(store, callback, taskStore, options) {
80034
80568
  this.store = store;
80035
80569
  this.callback = callback;
80036
80570
  this.taskStore = taskStore;
80571
+ this.isTaskExecuting = options?.isTaskExecuting;
80037
80572
  }
80038
80573
  /**
80039
80574
  * Start the scheduler. Enables assignment watching.
@@ -80245,6 +80780,10 @@ ${taskDetail.prompt}` : "No PROMPT.md available.",
80245
80780
  heartbeatLog.log(`Assignment trigger skipped for ${agent.id} (active run)`);
80246
80781
  return;
80247
80782
  }
80783
+ if (runtimeConfig.allowParallelExecution === false && this.isTaskExecuting?.(taskId)) {
80784
+ heartbeatLog.log(`Assignment tick skipped for ${agent.id} (parallel execution disabled, task ${taskId} executing)`);
80785
+ return;
80786
+ }
80248
80787
  let budgetStatus;
80249
80788
  try {
80250
80789
  budgetStatus = await this.store.getBudgetStatus(agent.id);
@@ -80415,6 +80954,11 @@ ${taskDetail.prompt}` : "No PROMPT.md available.",
80415
80954
  heartbeatLog.log(`Timer tick skipped for ${agentId} (active run)`);
80416
80955
  return;
80417
80956
  }
80957
+ const timerRc = agent.runtimeConfig ?? {};
80958
+ if (timerRc.allowParallelExecution === false && agent.taskId && this.isTaskExecuting?.(agent.taskId)) {
80959
+ heartbeatLog.log(`Timer tick skipped for ${agentId} (parallel execution disabled, task ${agent.taskId} executing)`);
80960
+ return;
80961
+ }
80418
80962
  if (this.taskStore) {
80419
80963
  const settings = await this.taskStore.getSettings();
80420
80964
  if (settings.globalPause) {
@@ -82072,6 +82616,7 @@ var init_stuck_task_detector = __esm({
82072
82616
  onStuck;
82073
82617
  beforeRequeue;
82074
82618
  onLoopDetected;
82619
+ paused = false;
82075
82620
  /**
82076
82621
  * Start the polling loop that checks for stuck tasks.
82077
82622
  * Safe to call multiple times (no-ops if already running).
@@ -82297,6 +82842,29 @@ var init_stuck_task_detector = __esm({
82297
82842
  }
82298
82843
  this.tracked.delete(taskId);
82299
82844
  }
82845
+ /**
82846
+ * Pause stuck detection checks while the engine is in a paused lifecycle.
82847
+ * Active tracked sessions are preserved and refreshed on resume.
82848
+ */
82849
+ pause() {
82850
+ if (this.paused) return;
82851
+ this.paused = true;
82852
+ }
82853
+ /**
82854
+ * Resume stuck detection checks and refresh tracked timestamps so the paused
82855
+ * interval does not count as inactivity/no-progress time.
82856
+ */
82857
+ resume() {
82858
+ if (!this.paused) return;
82859
+ this.paused = false;
82860
+ if (this.tracked.size === 0) return;
82861
+ const now = Date.now();
82862
+ for (const entry of this.tracked.values()) {
82863
+ entry.lastActivity = now;
82864
+ entry.lastProgressAt = now;
82865
+ entry.activitySinceProgress = 0;
82866
+ }
82867
+ }
82300
82868
  /**
82301
82869
  * Check for stuck tasks immediately, outside the normal polling cycle.
82302
82870
  * Safe to call at any time — will no-op if no tasks are tracked or timeout is disabled.
@@ -82318,6 +82886,7 @@ var init_stuck_task_detector = __esm({
82318
82886
  */
82319
82887
  async checkStuckTasks() {
82320
82888
  if (this.tracked.size === 0) return;
82889
+ if (this.paused) return;
82321
82890
  let settings;
82322
82891
  try {
82323
82892
  settings = await this.store.getSettings();
@@ -84733,6 +85302,282 @@ var init_plugin_runner = __esm({
84733
85302
  }
84734
85303
  });
84735
85304
 
85305
+ // ../engine/src/ephemeral-worker-manager.ts
85306
+ var TERMINAL_TASK_COLUMNS, EphemeralWorkerManager;
85307
+ var init_ephemeral_worker_manager = __esm({
85308
+ "../engine/src/ephemeral-worker-manager.ts"() {
85309
+ "use strict";
85310
+ init_src();
85311
+ TERMINAL_TASK_COLUMNS = /* @__PURE__ */ new Set(["done", "archived"]);
85312
+ EphemeralWorkerManager = class {
85313
+ agentStore;
85314
+ taskStore;
85315
+ log;
85316
+ isDeletionPendingExternal;
85317
+ /** taskId → owner. In-memory only; on-disk fallback covers restart gaps. */
85318
+ taskAgentMap = /* @__PURE__ */ new Map();
85319
+ /** agentIds with in-flight delete; prevents racing parallel cleanup paths. */
85320
+ pendingDeletions = /* @__PURE__ */ new Set();
85321
+ stateChangeListener;
85322
+ constructor(options) {
85323
+ this.agentStore = options.agentStore;
85324
+ this.taskStore = options.taskStore;
85325
+ this.log = options.logger;
85326
+ this.isDeletionPendingExternal = options.isDeletionPendingExternal ?? (() => false);
85327
+ }
85328
+ // ── public surface ───────────────────────────────────────────────────────
85329
+ /**
85330
+ * Establish ownership for a task that just started executing.
85331
+ * - If the task carries an `assignedAgentId` pointing at a durable agent,
85332
+ * bind that agent to the task and flip it through active → running.
85333
+ * - Otherwise spawn (or reclaim) an ephemeral `executor-${task.id}` worker.
85334
+ *
85335
+ * Cross-restart safe: looks up an existing ephemeral by name before
85336
+ * creating a new one.
85337
+ */
85338
+ async onTaskStart(task) {
85339
+ try {
85340
+ const assignedAgentId = task.assignedAgentId;
85341
+ if (assignedAgentId) {
85342
+ const assignedAgent = await this.agentStore.getAgent(assignedAgentId);
85343
+ if (assignedAgent && !isEphemeralAgent(assignedAgent)) {
85344
+ this.taskAgentMap.set(task.id, { agentId: assignedAgent.id, ephemeral: false });
85345
+ await this.agentStore.syncExecutionTaskLink(assignedAgent.id, task.id);
85346
+ const currentState = assignedAgent.state;
85347
+ if (currentState !== "running") {
85348
+ if (currentState !== "active") {
85349
+ await this.agentStore.updateAgentState(assignedAgent.id, "active");
85350
+ }
85351
+ await this.agentStore.updateAgentState(assignedAgent.id, "running");
85352
+ }
85353
+ return { agentId: assignedAgent.id, ephemeral: false };
85354
+ }
85355
+ }
85356
+ const cached = this.taskAgentMap.get(task.id);
85357
+ if (cached) {
85358
+ this.log.warn(`Skipping task-worker creation for ${task.id}: task already has execution owner`);
85359
+ return cached;
85360
+ }
85361
+ const existing = await this.lookupExistingByName(`executor-${task.id}`);
85362
+ if (existing) {
85363
+ if (existing.taskId === task.id) {
85364
+ this.taskAgentMap.set(task.id, { agentId: existing.id, ephemeral: true });
85365
+ this.log.log(`Reusing existing ephemeral worker ${existing.id} for task ${task.id} after restart`);
85366
+ return { agentId: existing.id, ephemeral: true };
85367
+ }
85368
+ try {
85369
+ await this.agentStore.deleteAgent(existing.id);
85370
+ this.log.log(`Deleted stale ephemeral worker ${existing.id} for task ${task.id} before respawn`);
85371
+ } catch (delErr) {
85372
+ this.log.warn(`Failed to delete stale ephemeral worker ${existing.id} for ${task.id}:`, delErr);
85373
+ }
85374
+ }
85375
+ const agent = await this.agentStore.createAgent({
85376
+ name: `executor-${task.id}`,
85377
+ role: "executor",
85378
+ metadata: {
85379
+ agentKind: "task-worker",
85380
+ taskWorker: true,
85381
+ managedBy: "task-executor"
85382
+ },
85383
+ runtimeConfig: { enabled: false }
85384
+ });
85385
+ this.taskAgentMap.set(task.id, { agentId: agent.id, ephemeral: true });
85386
+ await this.agentStore.assignTask(agent.id, task.id);
85387
+ await this.agentStore.updateAgentState(agent.id, "active");
85388
+ await this.agentStore.updateAgentState(agent.id, "running");
85389
+ return { agentId: agent.id, ephemeral: true };
85390
+ } catch (err) {
85391
+ this.log.warn(`Failed to initialize execution owner for task ${task.id}:`, err);
85392
+ return null;
85393
+ }
85394
+ }
85395
+ /**
85396
+ * Tear down ownership after a task completes or errors.
85397
+ * Final state for durable agents matches the outcome (idle/error).
85398
+ * Ephemeral workers are deleted regardless; if the in-memory owner is
85399
+ * missing (e.g. restart between onStart and this callback), falls back
85400
+ * to a name-based lookup so the worker still gets cleaned up.
85401
+ */
85402
+ async onTaskComplete(taskId) {
85403
+ return this.finalize(taskId, "active", "completion");
85404
+ }
85405
+ async onTaskError(taskId) {
85406
+ return this.finalize(taskId, "error", "error");
85407
+ }
85408
+ /**
85409
+ * Listener for agent:stateChanged. Cleans up ephemerals that get halted
85410
+ * out-of-band — e.g. by HeartbeatMonitor flipping them to paused/error
85411
+ * outside the onComplete/onError callbacks.
85412
+ *
85413
+ * Returns the listener fn so the caller can detach it on shutdown.
85414
+ */
85415
+ attachStateChangeListener() {
85416
+ if (this.stateChangeListener) return this.stateChangeListener;
85417
+ const listener = (agentId, from, to) => {
85418
+ if (to !== "paused" && to !== "error") return;
85419
+ if (from === to) return;
85420
+ if (this.pendingDeletions.has(agentId) || this.isDeletionPendingExternal(agentId)) return;
85421
+ void (async () => {
85422
+ try {
85423
+ const agent = await this.agentStore.getAgent(agentId);
85424
+ if (!agent) return;
85425
+ const isWorkerLike = isEphemeralAgent(agent) || agent.metadata?.taskWorker === true || agent.metadata?.agentKind === "task-worker" || agent.metadata?.agentKind === "spawned";
85426
+ if (!isWorkerLike) return;
85427
+ await this.deleteEphemeralAgent(agentId, "halt-listener");
85428
+ } catch (err) {
85429
+ this.log.warn(`Failed to process halt event for agent ${agentId}: ${this.formatError(err)}`);
85430
+ }
85431
+ })();
85432
+ };
85433
+ this.stateChangeListener = listener;
85434
+ this.agentStore.on("agent:stateChanged", listener);
85435
+ return listener;
85436
+ }
85437
+ detachStateChangeListener() {
85438
+ if (!this.stateChangeListener) return;
85439
+ this.agentStore.off("agent:stateChanged", this.stateChangeListener);
85440
+ this.stateChangeListener = void 0;
85441
+ }
85442
+ /**
85443
+ * Startup sweep. Returns the count of zombies cleaned up. Best-effort —
85444
+ * failures are logged and skipped so they never block runtime startup.
85445
+ *
85446
+ * Survivors after this pass: agents bound to a still-in-progress task.
85447
+ * Anything else (no taskId, terminal task column, or halted state) is
85448
+ * by definition a leak.
85449
+ */
85450
+ async reconcileOrphaned() {
85451
+ let cleanedCount = 0;
85452
+ try {
85453
+ const allAgents = await this.agentStore.listAgents({ includeEphemeral: true });
85454
+ for (const agent of allAgents) {
85455
+ if (!isEphemeralAgent(agent)) continue;
85456
+ if (!await this.shouldDeleteOnSweep(agent)) continue;
85457
+ try {
85458
+ await this.agentStore.deleteAgent(agent.id);
85459
+ cleanedCount += 1;
85460
+ } catch (err) {
85461
+ if (this.isBenignDeleteRace(agent.id, err)) {
85462
+ cleanedCount += 1;
85463
+ continue;
85464
+ }
85465
+ this.log.warn(`Startup sweep failed to delete ephemeral agent ${agent.id}: ${this.formatError(err)}`);
85466
+ }
85467
+ }
85468
+ } catch (err) {
85469
+ this.log.warn(`Startup ephemeral sweep failed: ${this.formatError(err)}`);
85470
+ }
85471
+ if (cleanedCount > 0) {
85472
+ this.log.log(`Startup ephemeral sweep cleaned ${cleanedCount} orphaned agent(s)`);
85473
+ }
85474
+ return cleanedCount;
85475
+ }
85476
+ /** Drop in-memory state. Call on runtime stop. */
85477
+ reset() {
85478
+ this.taskAgentMap.clear();
85479
+ this.pendingDeletions.clear();
85480
+ }
85481
+ /** True if a delete is in flight; lets external callers avoid double-delete races. */
85482
+ isDeletionPending(agentId) {
85483
+ return this.pendingDeletions.has(agentId);
85484
+ }
85485
+ getOwner(taskId) {
85486
+ return this.taskAgentMap.get(taskId);
85487
+ }
85488
+ // ── internals ────────────────────────────────────────────────────────────
85489
+ async finalize(taskId, terminalState, reason) {
85490
+ const owner = this.taskAgentMap.get(taskId) ?? await this.recoverOwnerFromDisk(taskId);
85491
+ if (!owner) return;
85492
+ const { agentId, ephemeral } = owner;
85493
+ if (ephemeral) {
85494
+ this.pendingDeletions.add(agentId);
85495
+ }
85496
+ try {
85497
+ await this.agentStore.updateAgentState(agentId, terminalState);
85498
+ } catch (err) {
85499
+ this.log.warn(`Failed to update agent ${agentId} to ${terminalState} (${reason}): ${this.formatError(err)}`);
85500
+ }
85501
+ try {
85502
+ await this.agentStore.syncExecutionTaskLink(agentId, void 0);
85503
+ } catch (err) {
85504
+ this.log.warn(`Failed to clear execution task link for agent ${agentId} on ${reason}: ${this.formatError(err)}`);
85505
+ }
85506
+ this.taskAgentMap.delete(taskId);
85507
+ if (!ephemeral) return;
85508
+ try {
85509
+ await this.agentStore.deleteAgent(agentId);
85510
+ } catch (err) {
85511
+ if (this.isBenignDeleteRace(agentId, err)) return;
85512
+ this.log.warn(`Failed to delete agent ${agentId} after ${reason}: ${this.formatError(err)}`);
85513
+ } finally {
85514
+ this.pendingDeletions.delete(agentId);
85515
+ }
85516
+ }
85517
+ /**
85518
+ * Look up the ephemeral worker on disk when the in-memory map has no
85519
+ * record. Covers the cross-restart case where onComplete fires in a
85520
+ * different process session than the onStart that created the worker.
85521
+ */
85522
+ async recoverOwnerFromDisk(taskId) {
85523
+ try {
85524
+ const candidate = await this.lookupExistingByName(`executor-${taskId}`);
85525
+ if (candidate) {
85526
+ this.log.log(`Recovered ephemeral owner ${candidate.id} for task ${taskId} from disk (cross-restart)`);
85527
+ return { agentId: candidate.id, ephemeral: true };
85528
+ }
85529
+ } catch (err) {
85530
+ this.log.warn(`Cross-restart owner lookup failed for task ${taskId}: ${this.formatError(err)}`);
85531
+ }
85532
+ return null;
85533
+ }
85534
+ async lookupExistingByName(name) {
85535
+ try {
85536
+ const found = await this.agentStore.findAgentByName(name);
85537
+ if (found && isEphemeralAgent(found)) return found;
85538
+ return null;
85539
+ } catch (err) {
85540
+ this.log.warn(`findAgentByName(${name}) failed: ${this.formatError(err)}`);
85541
+ return null;
85542
+ }
85543
+ }
85544
+ async shouldDeleteOnSweep(agent) {
85545
+ if (agent.state === "paused" || agent.state === "error") return true;
85546
+ if (!agent.taskId) return true;
85547
+ try {
85548
+ const task = await this.taskStore.getTask(agent.taskId);
85549
+ if (!task) return true;
85550
+ if (TERMINAL_TASK_COLUMNS.has(task.column)) return true;
85551
+ return task.column !== "in-progress";
85552
+ } catch {
85553
+ return true;
85554
+ }
85555
+ }
85556
+ async deleteEphemeralAgent(agentId, reason) {
85557
+ if (this.pendingDeletions.has(agentId)) return;
85558
+ this.pendingDeletions.add(agentId);
85559
+ try {
85560
+ await this.agentStore.deleteAgent(agentId);
85561
+ } catch (err) {
85562
+ if (this.isBenignDeleteRace(agentId, err)) return;
85563
+ this.log.warn(`Failed to delete ephemeral agent ${agentId} (${reason}): ${this.formatError(err)}`);
85564
+ } finally {
85565
+ this.pendingDeletions.delete(agentId);
85566
+ }
85567
+ }
85568
+ isBenignDeleteRace(agentId, err) {
85569
+ const msg = (err instanceof Error ? err.message : String(err)).toLowerCase();
85570
+ if (msg.includes("already deleted") || msg.includes("already removed")) return true;
85571
+ if (msg.includes(`agent ${agentId.toLowerCase()} not found`)) return true;
85572
+ return false;
85573
+ }
85574
+ formatError(err) {
85575
+ return err instanceof Error ? err.message : String(err);
85576
+ }
85577
+ };
85578
+ }
85579
+ });
85580
+
84736
85581
  // ../engine/src/runtimes/in-process-runtime.ts
84737
85582
  import { EventEmitter as EventEmitter19 } from "node:events";
84738
85583
  var InProcessRuntime;
@@ -84755,6 +85600,7 @@ var init_in_process_runtime = __esm({
84755
85600
  init_mission_autopilot();
84756
85601
  init_mission_execution_loop();
84757
85602
  init_triage();
85603
+ init_ephemeral_worker_manager();
84758
85604
  InProcessRuntime = class extends EventEmitter19 {
84759
85605
  /**
84760
85606
  * @param config - Runtime configuration
@@ -84779,8 +85625,12 @@ var init_in_process_runtime = __esm({
84779
85625
  agentStore;
84780
85626
  heartbeatMonitor;
84781
85627
  triggerScheduler;
84782
- /** Maps task IDs to execution owner metadata for lifecycle tracking */
84783
- taskAgentMap = /* @__PURE__ */ new Map();
85628
+ /**
85629
+ * Coordinates the ephemeral task-worker lifecycle (spawn dedup, finalize,
85630
+ * halt-listener cleanup, startup sweep). See `ephemeral-worker-manager.ts`.
85631
+ * Created once the AgentStore is available; guard call sites with `?`.
85632
+ */
85633
+ workerManager;
84784
85634
  lastActivityAt = (/* @__PURE__ */ new Date()).toISOString();
84785
85635
  pluginRunner;
84786
85636
  pluginStore;
@@ -84793,10 +85643,6 @@ var init_in_process_runtime = __esm({
84793
85643
  triageProcessor;
84794
85644
  messageStore;
84795
85645
  concurrencyChangedListener;
84796
- /** Set of agent IDs with in-flight ephemeral cleanup (prevents duplicate deletion) */
84797
- pendingEphemeralDeletions = /* @__PURE__ */ new Set();
84798
- /** Listener for agent:stateChanged events to clean up terminated ephemeral agents */
84799
- ephemeralTerminationListener;
84800
85646
  /**
84801
85647
  * Optional callback the runtime forwards to SelfHealingManager so that
84802
85648
  * stale-merge recovery can re-enqueue tasks immediately. Set by ProjectEngine
@@ -84826,7 +85672,7 @@ var init_in_process_runtime = __esm({
84826
85672
  runtimeLog.log(`Starting InProcessRuntime for project ${this.config.projectId}`);
84827
85673
  try {
84828
85674
  const {
84829
- TaskStore: TaskStore2,
85675
+ TaskStore: TaskStore3,
84830
85676
  PluginStore: PluginStoreClass,
84831
85677
  PluginLoader: PluginLoaderClass,
84832
85678
  MessageStore: MessageStoreClass
@@ -84835,7 +85681,7 @@ var init_in_process_runtime = __esm({
84835
85681
  this.taskStore = this.config.externalTaskStore;
84836
85682
  runtimeLog.log(`TaskStore provided externally for project ${this.config.projectId}`);
84837
85683
  } else {
84838
- this.taskStore = new TaskStore2(this.config.workingDirectory);
85684
+ this.taskStore = new TaskStore3(this.config.workingDirectory);
84839
85685
  await this.taskStore.init();
84840
85686
  runtimeLog.log(`TaskStore initialized for project ${this.config.projectId}`);
84841
85687
  }
@@ -85004,84 +85850,13 @@ var init_in_process_runtime = __esm({
85004
85850
  onStart: (task, worktreePath) => {
85005
85851
  this.recordActivity();
85006
85852
  runtimeLog.log(`Started executing task ${task.id} in ${worktreePath}`);
85007
- if (!this.agentStore) return;
85008
- void (async () => {
85009
- try {
85010
- const assignedAgentId = task.assignedAgentId;
85011
- if (assignedAgentId) {
85012
- const assignedAgent = await this.agentStore.getAgent(assignedAgentId);
85013
- if (assignedAgent && !isEphemeralAgent(assignedAgent)) {
85014
- this.taskAgentMap.set(task.id, { agentId: assignedAgent.id, ephemeral: false });
85015
- await this.agentStore.syncExecutionTaskLink(assignedAgent.id, task.id);
85016
- const currentState = assignedAgent.state;
85017
- if (currentState !== "running") {
85018
- if (currentState !== "active") {
85019
- await this.agentStore.updateAgentState(assignedAgent.id, "active");
85020
- }
85021
- await this.agentStore.updateAgentState(assignedAgent.id, "running");
85022
- }
85023
- return;
85024
- }
85025
- }
85026
- if (this.taskAgentMap.has(task.id)) {
85027
- runtimeLog.warn(`Skipping task-worker creation for ${task.id}: task already has execution owner`);
85028
- return;
85029
- }
85030
- const agent = await this.agentStore.createAgent({
85031
- name: `executor-${task.id}`,
85032
- role: "executor",
85033
- metadata: {
85034
- agentKind: "task-worker",
85035
- taskWorker: true,
85036
- managedBy: "task-executor"
85037
- },
85038
- runtimeConfig: {
85039
- enabled: false
85040
- }
85041
- });
85042
- this.taskAgentMap.set(task.id, { agentId: agent.id, ephemeral: true });
85043
- await this.agentStore.assignTask(agent.id, task.id);
85044
- await this.agentStore.updateAgentState(agent.id, "active");
85045
- await this.agentStore.updateAgentState(agent.id, "running");
85046
- } catch (err) {
85047
- runtimeLog.warn(`Failed to initialize execution owner for task ${task.id}:`, err);
85048
- }
85049
- })();
85853
+ void this.workerManager?.onTaskStart(task);
85050
85854
  },
85051
85855
  onComplete: (task) => {
85052
85856
  this.recordActivity();
85053
85857
  runtimeLog.log(`Completed task ${task.id}`);
85054
85858
  this.recordTaskCompletion(task.id, true);
85055
- const owner = this.taskAgentMap.get(task.id);
85056
- if (owner && this.agentStore) {
85057
- const { agentId, ephemeral } = owner;
85058
- if (ephemeral) {
85059
- this.pendingEphemeralDeletions.add(agentId);
85060
- }
85061
- void this.agentStore.updateAgentState(agentId, "terminated").catch((err) => {
85062
- const msg = err instanceof Error ? err.message : String(err);
85063
- runtimeLog.warn(`Failed to update agent ${agentId} state to terminated (completion): ${msg}`);
85064
- });
85065
- void this.agentStore.syncExecutionTaskLink(agentId, void 0).catch((err) => {
85066
- const msg = err instanceof Error ? err.message : String(err);
85067
- runtimeLog.warn(`Failed to clear execution task link for agent ${agentId} on completion: ${msg}`);
85068
- });
85069
- this.taskAgentMap.delete(task.id);
85070
- if (!ephemeral) return;
85071
- void (async () => {
85072
- try {
85073
- await this.agentStore?.deleteAgent(agentId);
85074
- } catch (err) {
85075
- if (this.isBenignEphemeralDeleteRaceError(agentId, err)) {
85076
- return;
85077
- }
85078
- const msg = err instanceof Error ? err.message : String(err);
85079
- runtimeLog.warn(`Failed to delete agent ${agentId} after completion: ${msg}`);
85080
- } finally {
85081
- this.pendingEphemeralDeletions.delete(agentId);
85082
- }
85083
- })();
85084
- }
85859
+ void this.workerManager?.onTaskComplete(task.id);
85085
85860
  },
85086
85861
  onError: (task, error) => {
85087
85862
  this.recordActivity();
@@ -85099,36 +85874,7 @@ var init_in_process_runtime = __esm({
85099
85874
  }
85100
85875
  })();
85101
85876
  }
85102
- const owner = this.taskAgentMap.get(task.id);
85103
- if (owner && this.agentStore) {
85104
- const { agentId, ephemeral } = owner;
85105
- if (ephemeral) {
85106
- this.pendingEphemeralDeletions.add(agentId);
85107
- }
85108
- void this.agentStore.updateAgentState(agentId, "terminated").catch((err) => {
85109
- const msg = err instanceof Error ? err.message : String(err);
85110
- runtimeLog.warn(`Failed to update agent ${agentId} state to terminated (error): ${msg}`);
85111
- });
85112
- void this.agentStore.syncExecutionTaskLink(agentId, void 0).catch((err) => {
85113
- const msg = err instanceof Error ? err.message : String(err);
85114
- runtimeLog.warn(`Failed to clear execution task link for agent ${agentId} on error: ${msg}`);
85115
- });
85116
- this.taskAgentMap.delete(task.id);
85117
- if (!ephemeral) return;
85118
- void (async () => {
85119
- try {
85120
- await this.agentStore?.deleteAgent(agentId);
85121
- } catch (err) {
85122
- if (this.isBenignEphemeralDeleteRaceError(agentId, err)) {
85123
- return;
85124
- }
85125
- const msg = err instanceof Error ? err.message : String(err);
85126
- runtimeLog.warn(`Failed to delete agent ${agentId} after error: ${msg}`);
85127
- } finally {
85128
- this.pendingEphemeralDeletions.delete(agentId);
85129
- }
85130
- })();
85131
- }
85877
+ void this.workerManager?.onTaskError(task.id);
85132
85878
  }
85133
85879
  };
85134
85880
  this.executor = new TaskExecutor(
@@ -85155,6 +85901,13 @@ var init_in_process_runtime = __esm({
85155
85901
  },
85156
85902
  onTerminated: (agentId, reason) => {
85157
85903
  runtimeLog.warn(`Agent ${agentId} terminated (unresponsive): ${reason}`);
85904
+ },
85905
+ onRunCompleted: (agentId) => {
85906
+ if (this.executor) {
85907
+ void this.executor.resumeTaskForAgent(agentId).catch((err) => {
85908
+ runtimeLog.warn(`resumeTaskForAgent failed for ${agentId}: ${err instanceof Error ? err.message : String(err)}`);
85909
+ });
85910
+ }
85158
85911
  }
85159
85912
  });
85160
85913
  this.heartbeatMonitor.start();
@@ -85174,83 +85927,24 @@ var init_in_process_runtime = __esm({
85174
85927
  contextSnapshot: { ...context }
85175
85928
  });
85176
85929
  },
85177
- this.taskStore
85930
+ this.taskStore,
85931
+ { isTaskExecuting: (taskId) => this.executor.getExecutingTaskIds().has(taskId) }
85178
85932
  );
85179
85933
  this.triggerScheduler.start();
85180
85934
  const isHeartbeatEnabledAgent = (agent) => !isEphemeralAgent(agent) && agent.runtimeConfig?.enabled !== false;
85181
85935
  const isTickableHeartbeatState = (state) => state === "active" || state === "running" || state === "idle";
85182
85936
  const isTimerManagedAgent = (agent) => isHeartbeatEnabledAgent(agent) && isTickableHeartbeatState(agent.state);
85183
- this.ephemeralTerminationListener = (agentId, from, to) => {
85184
- if (to !== "terminated") return;
85185
- if (from === "terminated") return;
85186
- if (this.pendingEphemeralDeletions.has(agentId) || this.executor?.isEphemeralDeletionPending(agentId)) return;
85187
- void (async () => {
85188
- try {
85189
- const agent = await this.agentStore?.getAgent(agentId);
85190
- if (!agent) return;
85191
- if (!isEphemeralAgent(agent)) return;
85192
- this.pendingEphemeralDeletions.add(agentId);
85193
- try {
85194
- await this.agentStore?.deleteAgent(agentId);
85195
- } catch (err) {
85196
- if (this.isBenignEphemeralDeleteRaceError(agentId, err)) {
85197
- return;
85198
- }
85199
- const msg = err instanceof Error ? err.message : String(err);
85200
- runtimeLog.warn(`Failed to delete ephemeral agent ${agentId} after termination: ${msg}`);
85201
- } finally {
85202
- this.pendingEphemeralDeletions.delete(agentId);
85203
- }
85204
- } catch (err) {
85205
- const msg = err instanceof Error ? err.message : String(err);
85206
- runtimeLog.warn(`Failed to process termination event for agent ${agentId}: ${msg}`);
85207
- }
85208
- })();
85209
- };
85210
- this.agentStore.on("agent:stateChanged", this.ephemeralTerminationListener);
85211
- try {
85212
- const allAgents = await this.agentStore.listAgents({ includeEphemeral: true });
85213
- let cleanedCount = 0;
85214
- for (const agent of allAgents) {
85215
- if (!isEphemeralAgent(agent)) continue;
85216
- let shouldDelete = agent.state === "terminated" || agent.state === "error";
85217
- if (!shouldDelete && agent.taskId) {
85218
- try {
85219
- const task = await this.taskStore.getTask(agent.taskId);
85220
- if (!task || task.column !== "in-progress") {
85221
- shouldDelete = true;
85222
- }
85223
- } catch {
85224
- shouldDelete = true;
85225
- }
85226
- }
85227
- if (!shouldDelete) continue;
85228
- try {
85229
- if (agent.state !== "terminated") {
85230
- await this.agentStore.updateAgentState(agent.id, "terminated");
85231
- }
85232
- } catch (err) {
85233
- const msg = err instanceof Error ? err.message : String(err);
85234
- runtimeLog.warn(`Startup sweep failed to set ephemeral agent ${agent.id} terminated: ${msg}`);
85235
- }
85236
- try {
85237
- await this.agentStore.deleteAgent(agent.id);
85238
- cleanedCount += 1;
85239
- } catch (err) {
85240
- if (this.isBenignEphemeralDeleteRaceError(agent.id, err)) {
85241
- cleanedCount += 1;
85242
- continue;
85243
- }
85244
- const msg = err instanceof Error ? err.message : String(err);
85245
- runtimeLog.warn(`Startup sweep failed to delete ephemeral agent ${agent.id}: ${msg}`);
85246
- }
85247
- }
85248
- if (cleanedCount > 0) {
85249
- runtimeLog.log(`Startup ephemeral sweep cleaned ${cleanedCount} orphaned agent(s)`);
85250
- }
85251
- } catch (err) {
85252
- const msg = err instanceof Error ? err.message : String(err);
85253
- runtimeLog.warn(`Startup ephemeral sweep failed (continuing): ${msg}`);
85937
+ if (this.agentStore && !this.workerManager) {
85938
+ this.workerManager = new EphemeralWorkerManager({
85939
+ agentStore: this.agentStore,
85940
+ taskStore: this.taskStore,
85941
+ logger: runtimeLog,
85942
+ isDeletionPendingExternal: (agentId) => this.executor?.isEphemeralDeletionPending(agentId) ?? false
85943
+ });
85944
+ }
85945
+ if (this.workerManager) {
85946
+ this.workerManager.attachStateChangeListener();
85947
+ await this.workerManager.reconcileOrphaned();
85254
85948
  }
85255
85949
  try {
85256
85950
  const agents = await this.agentStore.listAgents();
@@ -85421,12 +86115,8 @@ var init_in_process_runtime = __esm({
85421
86115
  this.routineScheduler.stop();
85422
86116
  runtimeLog.log("RoutineScheduler stopped");
85423
86117
  }
85424
- if (this.ephemeralTerminationListener && this.agentStore) {
85425
- this.agentStore.off("agent:stateChanged", this.ephemeralTerminationListener);
85426
- this.ephemeralTerminationListener = void 0;
85427
- runtimeLog.log("AgentStore agent:stateChanged listener removed");
85428
- }
85429
- this.pendingEphemeralDeletions.clear();
86118
+ this.workerManager?.detachStateChangeListener();
86119
+ this.workerManager?.reset();
85430
86120
  this.executor?.disposeEphemeralTimers();
85431
86121
  if (this.triggerScheduler) {
85432
86122
  this.triggerScheduler.stop();
@@ -85722,21 +86412,6 @@ var init_in_process_runtime = __esm({
85722
86412
  });
85723
86413
  runtimeLog.log("Event forwarding setup complete");
85724
86414
  }
85725
- /**
85726
- * Returns true when an ephemeral delete failure is expected due to cleanup races
85727
- * (for example the agent was already removed by a parallel cleanup path).
85728
- */
85729
- isBenignEphemeralDeleteRaceError(agentId, err) {
85730
- const msg = err instanceof Error ? err.message : String(err);
85731
- const normalized = msg.toLowerCase();
85732
- if (normalized.includes("already deleted") || normalized.includes("already removed")) {
85733
- return true;
85734
- }
85735
- if (normalized.includes(`agent ${agentId.toLowerCase()} not found`)) {
85736
- return true;
85737
- }
85738
- return /^agent\s+.+\s+not found$/i.test(msg.trim());
85739
- }
85740
86415
  /**
85741
86416
  * Update status and emit health-changed event.
85742
86417
  */
@@ -89469,6 +90144,37 @@ ${detail}`
89469
90144
  }
89470
90145
  }
89471
90146
  wireSettingsListeners(store) {
90147
+ const applyDetectorPauseLifecycle = (paused, source) => {
90148
+ try {
90149
+ const detector = this.runtime.stuckTaskDetector;
90150
+ if (paused) {
90151
+ detector?.pause?.();
90152
+ } else {
90153
+ detector?.resume?.();
90154
+ }
90155
+ } catch (err) {
90156
+ runtimeLog.warn(
90157
+ `${source}: stuck detector ${paused ? "pause" : "resume"} hook failed: ${err instanceof Error ? err.message : String(err)}`
90158
+ );
90159
+ }
90160
+ };
90161
+ const onPauseLifecycleTransition = ({
90162
+ settings: s,
90163
+ previous: prev
90164
+ }) => {
90165
+ const wasPaused = prev.globalPause || prev.enginePaused;
90166
+ const isPaused = s.globalPause || s.enginePaused;
90167
+ if (!wasPaused && isPaused) {
90168
+ const source = s.globalPause && !prev.globalPause ? "Global pause" : "Engine pause";
90169
+ applyDetectorPauseLifecycle(true, source);
90170
+ }
90171
+ if (wasPaused && !isPaused) {
90172
+ const source = prev.globalPause && !s.globalPause ? "Global unpause" : "Engine unpause";
90173
+ applyDetectorPauseLifecycle(false, source);
90174
+ }
90175
+ };
90176
+ store.on("settings:updated", onPauseLifecycleTransition);
90177
+ this.settingsHandlers.push(onPauseLifecycleTransition);
89472
90178
  const onGlobalPause = ({ settings, previous }) => {
89473
90179
  if (settings.globalPause && !previous.globalPause) {
89474
90180
  if (this.mergeAbortController) {
@@ -100000,12 +100706,14 @@ var init_register_settings_sync_inbound_routes = __esm({
100000
100706
  });
100001
100707
 
100002
100708
  // ../dashboard/src/routes/register-agent-core-routes.ts
100709
+ var MAX_AVATAR_BYTES;
100003
100710
  var init_register_agent_core_routes = __esm({
100004
100711
  "../dashboard/src/routes/register-agent-core-routes.ts"() {
100005
100712
  "use strict";
100006
100713
  init_src();
100007
100714
  init_api_error();
100008
100715
  init_src2();
100716
+ MAX_AVATAR_BYTES = 2 * 1024 * 1024;
100009
100717
  }
100010
100718
  });
100011
100719
 
@@ -101038,6 +101746,7 @@ import { Router as Router3 } from "express";
101038
101746
  var init_mission_routes = __esm({
101039
101747
  "../dashboard/src/mission-routes.ts"() {
101040
101748
  "use strict";
101749
+ init_src();
101041
101750
  init_project_store_resolver();
101042
101751
  init_src();
101043
101752
  init_sse_buffer();
@@ -106920,11 +107629,69 @@ var INSIGHT_CATEGORIES = [
106920
107629
  var INSIGHT_STATUSES = ["generated", "confirmed", "stale", "dismissed", "archived"];
106921
107630
  var INSIGHT_RUN_STATUSES = ["pending", "running", "completed", "failed", "cancelled"];
106922
107631
  var INSIGHT_RUN_TRIGGERS = ["schedule", "manual", "task_completion", "merge_event", "api"];
107632
+ function getTaskSourceAgentLabel(task) {
107633
+ const metadataAgentName = task.sourceMetadata?.agentName;
107634
+ if (typeof metadataAgentName === "string" && metadataAgentName.trim().length > 0) {
107635
+ return metadataAgentName.trim();
107636
+ }
107637
+ if (typeof task.sourceAgentId === "string" && task.sourceAgentId.trim().length > 0) {
107638
+ return task.sourceAgentId.trim();
107639
+ }
107640
+ return void 0;
107641
+ }
107642
+ function getTaskSourceLabel(task) {
107643
+ switch (task.sourceType) {
107644
+ case "dashboard_ui":
107645
+ return "Dashboard";
107646
+ case "quick_chat":
107647
+ return "Quick Chat";
107648
+ case "chat_session":
107649
+ return "Chat Session";
107650
+ case "agent_heartbeat": {
107651
+ const sourceAgent = getTaskSourceAgentLabel(task);
107652
+ return sourceAgent ? `Agent (${sourceAgent})` : "Agent";
107653
+ }
107654
+ case "automation": {
107655
+ const sourceAgent = getTaskSourceAgentLabel(task);
107656
+ return sourceAgent ? `Automation (${sourceAgent})` : "Automation";
107657
+ }
107658
+ case "cron":
107659
+ return "Scheduled Task";
107660
+ case "workflow_step":
107661
+ return "Workflow Step";
107662
+ case "github_import": {
107663
+ const issueUrl = task.sourceMetadata?.issueUrl;
107664
+ return typeof issueUrl === "string" && issueUrl.length > 0 ? `GitHub Import (${issueUrl})` : "GitHub Import";
107665
+ }
107666
+ case "research": {
107667
+ const findingLabel = task.sourceMetadata?.findingLabel;
107668
+ if (typeof findingLabel === "string" && findingLabel.length > 0) {
107669
+ return `Research (${findingLabel})`;
107670
+ }
107671
+ const runId = task.sourceMetadata?.runId;
107672
+ return typeof runId === "string" && runId.length > 0 ? `Research (${runId})` : "Research";
107673
+ }
107674
+ case "task_refine":
107675
+ return task.sourceParentTaskId ? `Refinement of ${task.sourceParentTaskId}` : "Refinement";
107676
+ case "task_duplicate":
107677
+ return task.sourceParentTaskId ? `Duplicate of ${task.sourceParentTaskId}` : "Duplicate";
107678
+ case "cli":
107679
+ return "CLI";
107680
+ case "api":
107681
+ return "API";
107682
+ case "recovery":
107683
+ return "Recovery";
107684
+ default:
107685
+ return void 0;
107686
+ }
107687
+ }
106923
107688
  function formatTaskLine(t) {
106924
107689
  const label = t.title || t.description.slice(0, 60) + (t.description.length > 60 ? "\u2026" : "");
107690
+ const source = getTaskSourceLabel(t);
107691
+ const sourceSuffix = source ? ` [via: ${source}]` : "";
106925
107692
  const deps = t.dependencies.length ? ` [deps: ${t.dependencies.join(", ")}]` : "";
106926
107693
  const paused = t.paused ? " (paused)" : "";
106927
- return `${t.id} ${label}${deps}${paused}`;
107694
+ return `${t.id} ${label}${sourceSuffix}${deps}${paused}`;
106928
107695
  }
106929
107696
  async function getResearchAvailability(store) {
106930
107697
  const settings = await store.getSettings();
@@ -107260,6 +108027,10 @@ Column: triage
107260
108027
  if (task.dependencies.length) {
107261
108028
  lines.push(`Dependencies: ${task.dependencies.join(", ")}`);
107262
108029
  }
108030
+ const sourceLabel = getTaskSourceLabel(task);
108031
+ if (sourceLabel) {
108032
+ lines.push(`Created via: ${sourceLabel}`);
108033
+ }
107263
108034
  if (task.paused) lines.push("Status: PAUSED");
107264
108035
  lines.push("");
107265
108036
  if (task.steps.length > 0) {