@runfusion/fusion 0.2.7 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/dist/bin.js +8436 -7001
  2. package/dist/client/assets/{AgentDetailView-BMrHuWGs.css → AgentDetailView-C1b9PC5l.css} +1 -1
  3. package/dist/client/assets/{AgentDetailView-B4lRk--v.js → AgentDetailView-CJIxNRq-.js} +3 -3
  4. package/dist/client/assets/{AgentsView-yCYBY2km.js → AgentsView-BS17exn3.js} +5 -5
  5. package/dist/client/assets/ChatView-BUlq3WNJ.js +1 -0
  6. package/dist/client/assets/{DevServerView-jXXtoQUx.js → DevServerView-qMPpnXRb.js} +2 -2
  7. package/dist/client/assets/{DirectoryPicker-izgMlS27.js → DirectoryPicker-CTwgv9LY.js} +1 -1
  8. package/dist/client/assets/DirectoryPicker-DzKVmxOf.css +1 -0
  9. package/dist/client/assets/{DocumentsView-DkkoHRwL.js → DocumentsView-DOz1KFGN.js} +1 -1
  10. package/dist/client/assets/{InsightsView-DaRtUPHX.js → InsightsView-CHZTJUic.js} +2 -2
  11. package/dist/client/assets/MemoryView-V0QdeO3e.js +2 -0
  12. package/dist/client/assets/{NodesView-BsUk_oiU.js → NodesView-BtGNRj2z.js} +1 -1
  13. package/dist/client/assets/PiExtensionsManager-D9Ye2Vak.js +11 -0
  14. package/dist/client/assets/PiExtensionsManager-kgTOHPE9.css +1 -0
  15. package/dist/client/assets/PluginManager-DRiIqol2.css +1 -0
  16. package/dist/client/assets/PluginManager-LeHp0jJ_.js +1 -0
  17. package/dist/client/assets/{RoadmapsView-SQol126Y.js → RoadmapsView-C413ISVU.js} +2 -2
  18. package/dist/client/assets/SettingsModal--vWmKBpT.css +1 -0
  19. package/dist/client/assets/SettingsModal-BZLL2xAP.js +31 -0
  20. package/dist/client/assets/SettingsModal-CDWvhvrd.css +1 -0
  21. package/dist/client/assets/SettingsModal-olTBmYJs.js +1 -0
  22. package/dist/client/assets/SetupWizardModal-BMa6p24b.css +1 -0
  23. package/dist/client/assets/{SetupWizardModal-CQc1uGSq.js → SetupWizardModal-WdaR2eQQ.js} +1 -1
  24. package/dist/client/assets/SkillsView-BcE57w8i.js +1 -0
  25. package/dist/client/assets/{folder-open-CI4TCD7P.js → folder-open-Ec4hU1xL.js} +1 -1
  26. package/dist/client/assets/index-CCYdhck-.js +616 -0
  27. package/dist/client/assets/index-lJ5WOmO9.css +1 -0
  28. package/dist/client/assets/{upload-CAlKC4qI.js → upload-BksRDuGJ.js} +1 -1
  29. package/dist/client/assets/users-EFU4n9Qr.js +6 -0
  30. package/dist/client/index.html +2 -2
  31. package/dist/extension.js +1266 -205
  32. package/dist/pi-claude-cli/index.ts +72 -28
  33. package/dist/pi-claude-cli/package.json +1 -1
  34. package/dist/pi-claude-cli/src/__tests__/event-bridge.test.ts +34 -0
  35. package/dist/pi-claude-cli/src/__tests__/mcp-config.test.ts +22 -0
  36. package/dist/pi-claude-cli/src/__tests__/prompt-builder.test.ts +72 -10
  37. package/dist/pi-claude-cli/src/__tests__/provider.test.ts +9 -9
  38. package/dist/pi-claude-cli/src/event-bridge.ts +17 -6
  39. package/dist/pi-claude-cli/src/mcp-config.ts +36 -3
  40. package/dist/pi-claude-cli/src/prompt-builder.ts +111 -7
  41. package/dist/pi-claude-cli/src/provider.ts +18 -2
  42. package/package.json +6 -5
  43. package/skill/fusion/SKILL.md +6 -1
  44. package/skill/fusion/references/engine-tools.md +54 -0
  45. package/skill/fusion/references/extension-tools.md +83 -84
  46. package/skill/fusion/references/fusion-capabilities.md +33 -31
  47. package/dist/client/assets/ChatView-CH9T0dDs.js +0 -1
  48. package/dist/client/assets/MemoryView-85NKuU3h.js +0 -2
  49. package/dist/client/assets/PiExtensionsManager-BF5pxrSE.js +0 -11
  50. package/dist/client/assets/PiExtensionsManager-K7HQ08L4.css +0 -1
  51. package/dist/client/assets/PluginManager-ccq3uK50.css +0 -1
  52. package/dist/client/assets/PluginManager-s6btydh5.js +0 -1
  53. package/dist/client/assets/SkillsView-BtUhs_QW.js +0 -1
  54. package/dist/client/assets/index-Ct-OqLpP.css +0 -1
  55. package/dist/client/assets/index-rNf7s96d.js +0 -649
package/dist/extension.js CHANGED
@@ -106,6 +106,7 @@ var init_settings_schema = __esm({
106
106
  pollIntervalMs: 15e3,
107
107
  heartbeatMultiplier: 1,
108
108
  groupOverlappingFiles: true,
109
+ overlapIgnorePaths: [],
109
110
  autoMerge: true,
110
111
  mergeStrategy: "direct",
111
112
  pushAfterMerge: false,
@@ -1323,11 +1324,6 @@ Your job: take a rough task description and produce a fully specified PROMPT.md
1323
1324
  ## What you produce
1324
1325
  Write a complete PROMPT.md specification to the given path using the write tool.
1325
1326
 
1326
- **Save your planning output as a task document** for downstream executor continuity:
1327
- - Use \`task_document_write(key="plan", content="...")\` to save a structured summary of your planning
1328
- - Include key decisions, approach rationale, architectural choices, and acceptance criteria
1329
- - Future executors will be able to read your plan via \`task_document_read(key="plan")\`
1330
-
1331
1327
  ## PROMPT.md Format
1332
1328
 
1333
1329
  Follow this structure exactly:
@@ -1391,52 +1387,169 @@ Follow this structure exactly:
1391
1387
  - [ ] Fix all failures
1392
1388
  - [ ] Build passes
1393
1389
 
1394
- ### Step N: Documentation & Delivery
1390
+ ### Step {N}: Documentation & Delivery
1395
1391
 
1396
- - [ ] Update documentation
1397
- - [ ] Final verification
1392
+ - [ ] Update relevant documentation
1393
+ - [ ] Save documentation deliverables as task documents via \`fn_task_document_write\` (key="docs", content=...)
1394
+ - [ ] Out-of-scope findings created as new tasks via \`fn_task_create\` tool
1398
1395
 
1399
1396
  ## Documentation Requirements
1400
1397
 
1401
1398
  **Must Update:**
1402
- - {Files that MUST be updated}
1399
+ - \`path/to/doc.md\` \u2014 {what to add/change}
1403
1400
 
1404
1401
  **Check If Affected:**
1405
- - {Files to check}
1402
+ - \`path/to/doc.md\` \u2014 {update if relevant}
1406
1403
 
1407
1404
  ## Completion Criteria
1408
1405
 
1409
1406
  - [ ] All steps complete
1410
1407
  - [ ] Lint passing
1411
1408
  - [ ] All tests passing
1412
- - [ ] Build passing
1409
+ - [ ] Typecheck passing (if available)
1413
1410
  - [ ] Documentation updated
1414
1411
 
1415
1412
  ## Git Commit Convention
1416
1413
 
1417
- Commits at step boundaries. All commits include the task ID.
1414
+ Commits at step boundaries. All commits include the task ID:
1415
+
1416
+ - **Step completion:** \`feat({ID}): complete Step N \u2014 description\`
1417
+ - **Bug fixes:** \`fix({ID}): description\`
1418
+ - **Tests:** \`test({ID}): description\`
1418
1419
 
1419
1420
  ## Do NOT
1420
1421
 
1421
- - {Things to avoid}
1422
+ - Expand task scope
1423
+ - Skip tests
1424
+ - Refuse necessary fixes just because they touch files outside the initial File Scope
1425
+ - Commit without the task ID prefix
1426
+ - Remove, delete, or gut modules, settings, interfaces, exports, or test files outside the File Scope
1427
+ - Remove features as "cleanup" \u2014 if something seems unused, create a task via \`fn_task_create\`
1428
+
1429
+ ## Changeset Requirements
1430
+
1431
+ If this task REMOVES existing functionality (deleting modules, settings, API endpoints, or exports), a changeset file is REQUIRED:
1432
+ - Create \`.changeset/{task-id}-removal.md\` explaining what was removed and why
1433
+ - This is mandatory for any net-negative change (more deletions than additions to existing files)
1422
1434
  \`\`\`
1423
1435
 
1424
- ## Key rules
1425
-
1426
- 1. **Size estimation:** S = 1-2 files, clear change. M = 3-8 files, moderate complexity. L = 8+ files, architecture changes, or security-sensitive.
1427
- 2. **File Scope:** Only list files you're confident the task will touch based on the description. When uncertain, list the module/directory with a wildcard.
1428
- 3. **Steps:** Each step should be independently committable and testable. Include a preflight step (Step 0) that validates preconditions.
1429
- 4. **Review Level:**
1430
- - 0 (None): Trivial changes, config updates, 1-file fixes
1431
- - 1 (Plan Only): New features, moderate changes
1432
- - 2 (Plan+Code): Architecture changes, multi-package changes
1433
- - 3 (Full): Security-sensitive, database migrations, breaking changes
1434
- - Score each task 0-8 across: Blast radius (0-2), Pattern novelty (0-2), Security sensitivity (0-2), Reversibility (0-2)
1435
- 5. **No placeholders:** Every section must have real content. No "TBD" or "fill in later".
1436
- 6. **Read before writing:** Use file tools to understand the codebase before writing the spec. Your spec must be grounded in real code paths.
1437
- 7. **Dependencies:** Check existing tasks. If this task depends on another, list it explicitly. If no dependencies, state "None" explicitly.
1438
- 8. **Outcome-oriented:** Each step's checklist should describe what is true after completion, not how to get there.
1439
- 9. **Be specific about tests:** Don't say "write tests" \u2014 specify what to test and what assertions to verify.`;
1436
+ ## Testing requirements
1437
+
1438
+ The Testing & Verification step MUST require REAL automated tests \u2014 actual test
1439
+ files with assertions that run via a test runner. Typechecks and builds are NOT
1440
+ tests. Manual verification is NOT a test.
1441
+
1442
+ - Each implementation step should include writing tests for the code being changed
1443
+ - The final Testing step runs lint, the FULL test suite, and project typecheck when the repo exposes one
1444
+ - Specs must instruct executors to fix lint failures and quality-gate failures directly, even when the required edits extend beyond the original File Scope
1445
+ - If the project has no test framework, the Testing step must include setting one up
1446
+ as part of this task (not just skipping tests)
1447
+
1448
+ ## Duplicate check
1449
+ Before writing a spec, call \`fn_task_list\` to see existing tasks.
1450
+ If a task already covers the same work (even if worded differently), do NOT
1451
+ write a PROMPT.md. Instead, write a single line to the output file:
1452
+ \`DUPLICATE: {existing-task-id}\`
1453
+
1454
+ ## Dependency awareness
1455
+ When you plan to list a task in the \`## Dependencies\` section, first call \`fn_task_get\` on that task ID to read its PROMPT.md.
1456
+ Use what you learn \u2014 file scope, APIs, patterns, completion criteria \u2014 to make the new spec accurate: reference the right paths, avoid conflicting assumptions, and describe what the dependency must deliver before this task starts.
1457
+ If the dependency task has no PROMPT.md yet (not yet specified), note that in the Dependencies section.
1458
+
1459
+ ## Triage subtask breakdown
1460
+ When the task includes \`breakIntoSubtasks: true\`, first decide whether it should be split.
1461
+
1462
+ - Split only when the work is meaningfully decomposable into 2-5 independently executable child tasks.
1463
+ - If splitting: use the \`fn_task_create\` tool to create child tasks in triage, include clear descriptions and dependencies between them, then stop. Do NOT write a PROMPT.md for the parent task.
1464
+ - **CRITICAL \u2014 subtask dependencies:** the parent task is deleted once all subtasks are created. \`dependencies\` on a new subtask may ONLY reference sibling subtasks you have created earlier in this same split (or unrelated existing tasks). **Never depend on the parent task's id.** If a child conceptually "waits for the parent's remaining work", create a sibling subtask that does that work and depend on the sibling instead. The \`fn_task_create\` tool will reject parent-id dependencies with an error.
1465
+ - If not splitting: proceed with a normal PROMPT.md specification.
1466
+
1467
+ ## Proactive Subtask Breakdown for M/L Tasks
1468
+ For tasks you assess as Size M or L, proactively evaluate whether splitting into 2-5 child tasks would improve execution quality and reliability.
1469
+
1470
+ **Strongly recommend splitting when ANY of these apply:**
1471
+ - The task will require MORE THAN 7 implementation steps
1472
+ - The task affects MORE THAN 3 different packages/modules
1473
+ - Any single step would take more than 1-2 hours to complete
1474
+ - The task has multiple independent deliverables that could be developed in parallel
1475
+
1476
+ **ANTI-PATTERN:** Avoid writing single tasks with 10+ steps. If you find yourself planning more than 7 steps, STOP and create 2-5 child tasks instead.
1477
+
1478
+ **Splitting guidance:**
1479
+ - Even when \`breakIntoSubtasks\` is not set to \`true\`, apply these thresholds proactively
1480
+ - Keep explicit user intent first: when \`breakIntoSubtasks: true\`, follow the mandatory breakdown flow above
1481
+ - Size S tasks should generally NOT be split because the overhead usually outweighs the benefit
1482
+ - Only keep a task as one unit if it genuinely has 5 or fewer focused steps with a clear scope
1483
+ - If you decide not to split an M/L task, proceed with a normal PROMPT.md specification
1484
+
1485
+ ## Triage tools
1486
+ You have these extra tools during triage:
1487
+ - \`fn_task_list\` \u2014 list existing active tasks
1488
+ - \`fn_task_get\` \u2014 inspect a task and its PROMPT.md
1489
+ - \`fn_task_create\` \u2014 create a child/follow-up task while triaging
1490
+ - \`fn_task_document_write\` \u2014 save a planning document (e.g., key="plan")
1491
+ - \`fn_task_document_read\` \u2014 read back a previously saved document
1492
+
1493
+ When the planning conversation produces a structured plan, save it as a document with \`fn_task_document_write(key='plan', content='...')\` so the executor can reference it during implementation.
1494
+
1495
+ ## Guidelines
1496
+ - Read the project structure and relevant source files to understand context BEFORE writing
1497
+ - Be specific \u2014 name actual files, functions, and patterns from the codebase
1498
+ - Steps should express OUTCOMES, not micro-instructions (2-5 checkboxes per step)
1499
+ - Always include a testing step and a documentation step
1500
+ - For tasks whose primary deliverable is documentation (updating docs, writing README, API references), include an explicit step or checkbox instructing the executor to save the final documentation content via \`fn_task_document_write\`
1501
+ - Include a "Do NOT" section with project-appropriate guardrails
1502
+ - Size assessment: S (<2h), M (2-4h), L (4-8h). Split if XL (8h+)
1503
+ - Review level scoring: Blast radius (0-2), Pattern novelty (0-2), Security (0-2), Reversibility (0-2)
1504
+ - 0-1 \u2192 Level 0, 2-3 \u2192 Level 1, 4-5 \u2192 Level 2, 6-8 \u2192 Level 3
1505
+
1506
+ ## Project commands
1507
+ When the user prompt includes a "Project Commands" section with test and/or build
1508
+ commands, use those EXACT commands in the testing/verification steps and anywhere
1509
+ the spec references running tests or builds. Do NOT guess or infer commands from
1510
+ package.json when explicit commands are provided.
1511
+
1512
+ ## Spec Review
1513
+
1514
+ After writing the PROMPT.md, call \`fn_review_spec()\` to get an independent quality review.
1515
+
1516
+ - **APPROVE** \u2192 your spec is accepted, you're done
1517
+ - **REVISE** \u2192 fix the issues described in the review feedback, rewrite the PROMPT.md, and call \`fn_review_spec()\` again. Repeat until approved.
1518
+ - **RETHINK** \u2192 your approach was fundamentally rejected. The conversation will rewind. Read the feedback carefully and take a completely different approach. Do NOT repeat the rejected strategy.
1519
+
1520
+ You MUST call \`fn_review_spec()\` after writing the PROMPT.md. Do not finish without getting an APPROVE verdict.
1521
+
1522
+ ## Output
1523
+ Write the PROMPT.md directly using the write tool, then call \`fn_review_spec()\` for review.
1524
+
1525
+ ## Frontend UX Criteria Injection
1526
+
1527
+ <!-- UX criteria mirror the "frontend-ux-design" reviewer persona in packages/core/src/types.ts \u2014 keep them aligned. -->
1528
+
1529
+ If the derived **File Scope** touches any of the following paths:
1530
+ - \`packages/dashboard/**\`
1531
+ - \`packages/*/app/components/**\`
1532
+ - \`packages/*/app/hooks/**\`
1533
+ - Any \`*.css\` or \`*.tsx\` file inside a dashboard-like package
1534
+
1535
+ \u2026then **PREPEND** a \`## Frontend UX Criteria\` section to the generated PROMPT.md, placed immediately after the \`## Mission\` section.
1536
+
1537
+ Use this exact checklist (keep it verbatim \u2014 do not expand or reorder):
1538
+
1539
+ \`\`\`markdown
1540
+ ## Frontend UX Criteria
1541
+
1542
+ - [ ] **Design tokens only** \u2014 no hardcoded \`px\` values except \`0\`, no hardcoded hex/rgb colors; use CSS custom properties (\`--color-*\`, \`--spacing-*\`, etc.)
1543
+ - [ ] **Icon sizing** \u2014 match the surrounding component's icon size convention (default lucide size unless the local pattern already uses an explicit \`size={N}\`)
1544
+ - [ ] **Semantic color tokens for status** \u2014 use \`--color-error\` for stderr/error states, \`--color-warning\` for starting/pending states; never hardcode status colors
1545
+ - [ ] **Component reuse** \u2014 reach for existing classes (\`.btn\`, \`.btn-icon\`, \`.card\`, \`.input\`) before writing one-off styles
1546
+ - [ ] **Responsive scaffolding** \u2014 add \`@media (max-width: 768px)\` overrides for any new layout; verify mobile usability
1547
+ - [ ] **Single canonical nav destination** \u2014 each route must appear in exactly one of: Header primary nav, Header overflow menu, or MobileNavBar More; no duplicates across all three
1548
+ - [ ] **Status-indicator dot convention** \u2014 use the existing \`.status-dot\` pattern (size, border, animation) rather than custom dot styling
1549
+ - [ ] **Visual hierarchy preserved** \u2014 new elements must not disrupt heading levels, content flow, or information architecture established in the surrounding page
1550
+ \`\`\`
1551
+
1552
+ Only inject this section when the task genuinely touches frontend UI. Omit it for backend-only, config-only, or documentation-only tasks.`;
1440
1553
  REVIEWER_PROMPT_TEXT = `You are an independent code and plan reviewer.
1441
1554
 
1442
1555
  You provide quality assessment for task implementations. You have full read
@@ -2075,7 +2188,7 @@ var init_db = __esm({
2075
2188
  "../core/src/db.ts"() {
2076
2189
  "use strict";
2077
2190
  init_types();
2078
- SCHEMA_VERSION = 45;
2191
+ SCHEMA_VERSION = 46;
2079
2192
  SCHEMA_SQL = `
2080
2193
  -- Tasks table with JSON columns for nested data
2081
2194
  CREATE TABLE IF NOT EXISTS tasks (
@@ -2549,6 +2662,33 @@ CREATE INDEX IF NOT EXISTS idxProjectInsightsCategory
2549
2662
  -- Index for filtering runs by projectId
2550
2663
  CREATE INDEX IF NOT EXISTS idxInsightRunsProjectId
2551
2664
  ON project_insight_runs(projectId);
2665
+
2666
+ -- Todo list persistence tables (FN-2575)
2667
+ -- Project-scoped todo lists and ordered checklist items
2668
+
2669
+ CREATE TABLE IF NOT EXISTS todo_lists (
2670
+ id TEXT PRIMARY KEY,
2671
+ projectId TEXT NOT NULL,
2672
+ title TEXT NOT NULL,
2673
+ createdAt TEXT NOT NULL,
2674
+ updatedAt TEXT NOT NULL
2675
+ );
2676
+
2677
+ CREATE TABLE IF NOT EXISTS todo_items (
2678
+ id TEXT PRIMARY KEY,
2679
+ listId TEXT NOT NULL,
2680
+ text TEXT NOT NULL,
2681
+ completed INTEGER NOT NULL DEFAULT 0,
2682
+ completedAt TEXT,
2683
+ sortOrder INTEGER NOT NULL DEFAULT 0,
2684
+ createdAt TEXT NOT NULL,
2685
+ updatedAt TEXT NOT NULL,
2686
+ FOREIGN KEY (listId) REFERENCES todo_lists(id) ON DELETE CASCADE
2687
+ );
2688
+
2689
+ CREATE INDEX IF NOT EXISTS idxTodoListsProjectId ON todo_lists(projectId);
2690
+ CREATE INDEX IF NOT EXISTS idxTodoItemsListId ON todo_items(listId);
2691
+ CREATE INDEX IF NOT EXISTS idxTodoItemsSortOrder ON todo_items(listId, sortOrder);
2552
2692
  `;
2553
2693
  Database = class {
2554
2694
  db;
@@ -3457,6 +3597,35 @@ CREATE INDEX IF NOT EXISTS idxInsightRunsProjectId
3457
3597
  this.addColumnIfMissing("tasks", "sourceIssueUrl", "TEXT");
3458
3598
  });
3459
3599
  }
3600
+ if (version < 46) {
3601
+ this.applyMigration(46, () => {
3602
+ this.db.exec(`
3603
+ CREATE TABLE IF NOT EXISTS todo_lists (
3604
+ id TEXT PRIMARY KEY,
3605
+ projectId TEXT NOT NULL,
3606
+ title TEXT NOT NULL,
3607
+ createdAt TEXT NOT NULL,
3608
+ updatedAt TEXT NOT NULL
3609
+ )
3610
+ `);
3611
+ this.db.exec(`
3612
+ CREATE TABLE IF NOT EXISTS todo_items (
3613
+ id TEXT PRIMARY KEY,
3614
+ listId TEXT NOT NULL,
3615
+ text TEXT NOT NULL,
3616
+ completed INTEGER NOT NULL DEFAULT 0,
3617
+ completedAt TEXT,
3618
+ sortOrder INTEGER NOT NULL DEFAULT 0,
3619
+ createdAt TEXT NOT NULL,
3620
+ updatedAt TEXT NOT NULL,
3621
+ FOREIGN KEY (listId) REFERENCES todo_lists(id) ON DELETE CASCADE
3622
+ )
3623
+ `);
3624
+ this.db.exec("CREATE INDEX IF NOT EXISTS idxTodoListsProjectId ON todo_lists(projectId)");
3625
+ this.db.exec("CREATE INDEX IF NOT EXISTS idxTodoItemsListId ON todo_items(listId)");
3626
+ this.db.exec("CREATE INDEX IF NOT EXISTS idxTodoItemsSortOrder ON todo_items(listId, sortOrder)");
3627
+ });
3628
+ }
3460
3629
  }
3461
3630
  /**
3462
3631
  * Run a single migration step inside a transaction and bump the version.
@@ -11030,6 +11199,225 @@ var init_insight_store = __esm({
11030
11199
  }
11031
11200
  });
11032
11201
 
11202
+ // ../core/src/todo-store.ts
11203
+ import { EventEmitter as EventEmitter8 } from "node:events";
11204
+ var TodoStore;
11205
+ var init_todo_store = __esm({
11206
+ "../core/src/todo-store.ts"() {
11207
+ "use strict";
11208
+ TodoStore = class extends EventEmitter8 {
11209
+ constructor(db) {
11210
+ super();
11211
+ this.db = db;
11212
+ this.setMaxListeners(50);
11213
+ }
11214
+ getDatabase() {
11215
+ return this.db;
11216
+ }
11217
+ generateListId() {
11218
+ const timestamp = Date.now().toString(36).toUpperCase();
11219
+ const random = Math.random().toString(36).slice(2, 6).toUpperCase();
11220
+ return `TDL-${timestamp}-${random}`;
11221
+ }
11222
+ generateItemId() {
11223
+ const timestamp = Date.now().toString(36).toUpperCase();
11224
+ const random = Math.random().toString(36).slice(2, 6).toUpperCase();
11225
+ return `TDI-${timestamp}-${random}`;
11226
+ }
11227
+ rowToTodoList(row) {
11228
+ return {
11229
+ id: row.id,
11230
+ projectId: row.projectId,
11231
+ title: row.title,
11232
+ createdAt: row.createdAt,
11233
+ updatedAt: row.updatedAt
11234
+ };
11235
+ }
11236
+ rowToTodoItem(row) {
11237
+ return {
11238
+ id: row.id,
11239
+ listId: row.listId,
11240
+ text: row.text,
11241
+ completed: row.completed === 1,
11242
+ completedAt: row.completedAt,
11243
+ sortOrder: row.sortOrder,
11244
+ createdAt: row.createdAt,
11245
+ updatedAt: row.updatedAt
11246
+ };
11247
+ }
11248
+ createList(projectId, input) {
11249
+ const now = (/* @__PURE__ */ new Date()).toISOString();
11250
+ const list = {
11251
+ id: this.generateListId(),
11252
+ projectId,
11253
+ title: input.title,
11254
+ createdAt: now,
11255
+ updatedAt: now
11256
+ };
11257
+ this.db.prepare(
11258
+ "INSERT INTO todo_lists (id, projectId, title, createdAt, updatedAt) VALUES (?, ?, ?, ?, ?)"
11259
+ ).run(list.id, list.projectId, list.title, list.createdAt, list.updatedAt);
11260
+ this.db.bumpLastModified();
11261
+ this.emit("list:created", list);
11262
+ return list;
11263
+ }
11264
+ getList(id) {
11265
+ const row = this.db.prepare("SELECT * FROM todo_lists WHERE id = ?").get(id);
11266
+ return row ? this.rowToTodoList(row) : void 0;
11267
+ }
11268
+ listLists(projectId) {
11269
+ const rows = this.db.prepare(
11270
+ "SELECT * FROM todo_lists WHERE projectId = ? ORDER BY createdAt ASC, id ASC"
11271
+ ).all(projectId);
11272
+ return rows.map((row) => this.rowToTodoList(row));
11273
+ }
11274
+ updateList(id, input) {
11275
+ const existing = this.getList(id);
11276
+ if (!existing) return void 0;
11277
+ const now = (/* @__PURE__ */ new Date()).toISOString();
11278
+ const title = input.title ?? existing.title;
11279
+ this.db.prepare("UPDATE todo_lists SET title = ?, updatedAt = ? WHERE id = ?").run(title, now, id);
11280
+ this.db.bumpLastModified();
11281
+ const updated = this.getList(id);
11282
+ this.emit("list:updated", updated);
11283
+ return updated;
11284
+ }
11285
+ deleteList(id) {
11286
+ const result = this.db.prepare("DELETE FROM todo_lists WHERE id = ?").run(id);
11287
+ if ((result.changes ?? 0) < 1) return false;
11288
+ this.db.bumpLastModified();
11289
+ this.emit("list:deleted", id);
11290
+ return true;
11291
+ }
11292
+ createItem(listId, input) {
11293
+ const list = this.getList(listId);
11294
+ if (!list) {
11295
+ throw new Error(`Todo list ${listId} not found`);
11296
+ }
11297
+ const nextSortOrder = (() => {
11298
+ if (input.sortOrder !== void 0) return input.sortOrder;
11299
+ const row = this.db.prepare("SELECT MAX(sortOrder) AS maxSortOrder FROM todo_items WHERE listId = ?").get(listId);
11300
+ return (row?.maxSortOrder ?? -1) + 1;
11301
+ })();
11302
+ const now = (/* @__PURE__ */ new Date()).toISOString();
11303
+ const item = {
11304
+ id: this.generateItemId(),
11305
+ listId,
11306
+ text: input.text,
11307
+ completed: false,
11308
+ completedAt: null,
11309
+ sortOrder: nextSortOrder,
11310
+ createdAt: now,
11311
+ updatedAt: now
11312
+ };
11313
+ this.db.prepare(
11314
+ `INSERT INTO todo_items
11315
+ (id, listId, text, completed, completedAt, sortOrder, createdAt, updatedAt)
11316
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)`
11317
+ ).run(item.id, item.listId, item.text, 0, null, item.sortOrder, item.createdAt, item.updatedAt);
11318
+ this.db.bumpLastModified();
11319
+ this.emit("item:created", item);
11320
+ return item;
11321
+ }
11322
+ getItem(id) {
11323
+ const row = this.db.prepare("SELECT * FROM todo_items WHERE id = ?").get(id);
11324
+ return row ? this.rowToTodoItem(row) : void 0;
11325
+ }
11326
+ listItems(listId) {
11327
+ const rows = this.db.prepare(
11328
+ "SELECT * FROM todo_items WHERE listId = ? ORDER BY sortOrder ASC, createdAt ASC, id ASC"
11329
+ ).all(listId);
11330
+ return rows.map((row) => this.rowToTodoItem(row));
11331
+ }
11332
+ updateItem(id, input) {
11333
+ const existing = this.getItem(id);
11334
+ if (!existing) return void 0;
11335
+ const now = (/* @__PURE__ */ new Date()).toISOString();
11336
+ const sets = ["updatedAt = ?"];
11337
+ const params = [now];
11338
+ if (input.text !== void 0) {
11339
+ sets.push("text = ?");
11340
+ params.push(input.text);
11341
+ }
11342
+ if (input.sortOrder !== void 0) {
11343
+ sets.push("sortOrder = ?");
11344
+ params.push(input.sortOrder);
11345
+ }
11346
+ if (input.completed !== void 0) {
11347
+ sets.push("completed = ?");
11348
+ params.push(input.completed ? 1 : 0);
11349
+ sets.push("completedAt = ?");
11350
+ params.push(input.completed ? now : null);
11351
+ }
11352
+ params.push(id);
11353
+ this.db.prepare(`UPDATE todo_items SET ${sets.join(", ")} WHERE id = ?`).run(...params);
11354
+ this.db.bumpLastModified();
11355
+ const updated = this.getItem(id);
11356
+ this.emit("item:updated", updated);
11357
+ return updated;
11358
+ }
11359
+ deleteItem(id) {
11360
+ const result = this.db.prepare("DELETE FROM todo_items WHERE id = ?").run(id);
11361
+ if ((result.changes ?? 0) < 1) return false;
11362
+ this.db.bumpLastModified();
11363
+ this.emit("item:deleted", id);
11364
+ return true;
11365
+ }
11366
+ toggleItem(id) {
11367
+ const existing = this.getItem(id);
11368
+ if (!existing) return void 0;
11369
+ return this.updateItem(id, { completed: !existing.completed });
11370
+ }
11371
+ reorderItems(listId, itemIds) {
11372
+ const items = this.listItems(listId);
11373
+ const existingIds = items.map((item) => item.id);
11374
+ if (new Set(itemIds).size !== itemIds.length) {
11375
+ throw new Error("Cannot reorder items: duplicate item IDs provided");
11376
+ }
11377
+ if (existingIds.length !== itemIds.length) {
11378
+ throw new Error("Cannot reorder items: provided IDs must include all items in the list");
11379
+ }
11380
+ const existingIdSet = new Set(existingIds);
11381
+ for (const itemId of itemIds) {
11382
+ if (!existingIdSet.has(itemId)) {
11383
+ throw new Error(`Cannot reorder items: item ${itemId} does not belong to list ${listId}`);
11384
+ }
11385
+ }
11386
+ const now = (/* @__PURE__ */ new Date()).toISOString();
11387
+ this.db.transaction(() => {
11388
+ for (let index = 0; index < itemIds.length; index++) {
11389
+ this.db.prepare("UPDATE todo_items SET sortOrder = ?, updatedAt = ? WHERE id = ? AND listId = ?").run(index, now, itemIds[index], listId);
11390
+ }
11391
+ });
11392
+ this.db.bumpLastModified();
11393
+ const reordered = this.listItems(listId);
11394
+ this.emit("items:reordered", { listId, items: reordered });
11395
+ return reordered;
11396
+ }
11397
+ getListsWithItems(projectId) {
11398
+ const lists = this.listLists(projectId);
11399
+ if (lists.length === 0) return [];
11400
+ const rows = this.db.prepare(
11401
+ `SELECT * FROM todo_items
11402
+ WHERE listId IN (SELECT id FROM todo_lists WHERE projectId = ?)
11403
+ ORDER BY listId ASC, sortOrder ASC, createdAt ASC, id ASC`
11404
+ ).all(projectId);
11405
+ const itemsByListId = /* @__PURE__ */ new Map();
11406
+ for (const row of rows) {
11407
+ const item = this.rowToTodoItem(row);
11408
+ const listItems = itemsByListId.get(item.listId) ?? [];
11409
+ listItems.push(item);
11410
+ itemsByListId.set(item.listId, listItems);
11411
+ }
11412
+ return lists.map((list) => ({
11413
+ ...list,
11414
+ items: itemsByListId.get(list.id) ?? []
11415
+ }));
11416
+ }
11417
+ };
11418
+ }
11419
+ });
11420
+
11033
11421
  // ../core/src/app-version.ts
11034
11422
  import { readFileSync } from "node:fs";
11035
11423
  import { join as join8, dirname as dirname2 } from "node:path";
@@ -14774,7 +15162,7 @@ var require_dist = __commonJS({
14774
15162
  });
14775
15163
 
14776
15164
  // ../core/src/node-discovery.ts
14777
- import { EventEmitter as EventEmitter8 } from "node:events";
15165
+ import { EventEmitter as EventEmitter9 } from "node:events";
14778
15166
  import os from "node:os";
14779
15167
  var import_bonjour_service, DEFAULT_DISCOVERY_CONFIG, STALE_CLEANUP_INTERVAL_MS, FUSION_VERSION, NodeDiscovery;
14780
15168
  var init_node_discovery = __esm({
@@ -14790,7 +15178,7 @@ var init_node_discovery = __esm({
14790
15178
  };
14791
15179
  STALE_CLEANUP_INTERVAL_MS = 6e4;
14792
15180
  FUSION_VERSION = "0.1.0";
14793
- NodeDiscovery = class extends EventEmitter8 {
15181
+ NodeDiscovery = class extends EventEmitter9 {
14794
15182
  config;
14795
15183
  bonjour = null;
14796
15184
  broadcastService = null;
@@ -15241,7 +15629,7 @@ var init_system_metrics = __esm({
15241
15629
  });
15242
15630
 
15243
15631
  // ../core/src/central-core.ts
15244
- import { EventEmitter as EventEmitter9 } from "node:events";
15632
+ import { EventEmitter as EventEmitter10 } from "node:events";
15245
15633
  import { createHash as createHash2, randomUUID as randomUUID4 } from "node:crypto";
15246
15634
  import { existsSync as existsSync7, statSync } from "node:fs";
15247
15635
  import { mkdir as mkdir4 } from "node:fs/promises";
@@ -15256,7 +15644,7 @@ var init_central_core = __esm({
15256
15644
  init_node_connection();
15257
15645
  init_node_discovery();
15258
15646
  init_system_metrics();
15259
- CentralCore = class extends EventEmitter9 {
15647
+ CentralCore = class extends EventEmitter10 {
15260
15648
  db = null;
15261
15649
  globalDir;
15262
15650
  initialized = false;
@@ -26907,7 +27295,7 @@ var automation_store_exports = {};
26907
27295
  __export(automation_store_exports, {
26908
27296
  AutomationStore: () => AutomationStore
26909
27297
  });
26910
- import { EventEmitter as EventEmitter10 } from "node:events";
27298
+ import { EventEmitter as EventEmitter11 } from "node:events";
26911
27299
  import { join as join12 } from "node:path";
26912
27300
  import { randomUUID as randomUUID5 } from "node:crypto";
26913
27301
  var import_cron_parser, CRON_TIMEZONE, AutomationStore;
@@ -26918,7 +27306,7 @@ var init_automation_store = __esm({
26918
27306
  init_automation();
26919
27307
  init_db();
26920
27308
  CRON_TIMEZONE = "UTC";
26921
- AutomationStore = class _AutomationStore extends EventEmitter10 {
27309
+ AutomationStore = class _AutomationStore extends EventEmitter11 {
26922
27310
  constructor(rootDir) {
26923
27311
  super();
26924
27312
  this.rootDir = rootDir;
@@ -27107,6 +27495,9 @@ var init_automation_store = __esm({
27107
27495
  async updateSchedule(id, updates) {
27108
27496
  return this.withScheduleLock(id, async () => {
27109
27497
  const schedule = await this.getSchedule(id);
27498
+ const previousEnabled = schedule.enabled;
27499
+ const previousScheduleType = schedule.scheduleType;
27500
+ const previousCronExpression = schedule.cronExpression;
27110
27501
  if (updates.name !== void 0) {
27111
27502
  if (!updates.name.trim()) throw new Error("Name cannot be empty");
27112
27503
  schedule.name = updates.name.trim();
@@ -27145,10 +27536,13 @@ var init_automation_store = __esm({
27145
27536
  if (updates.enabled !== void 0) {
27146
27537
  schedule.enabled = updates.enabled;
27147
27538
  }
27148
- if (schedule.enabled) {
27149
- schedule.nextRunAt = this.computeNextRun(schedule.cronExpression);
27150
- } else {
27539
+ const cadenceChanged = schedule.scheduleType !== previousScheduleType || schedule.cronExpression !== previousCronExpression;
27540
+ const enabledFromDisabled = !previousEnabled && schedule.enabled;
27541
+ const missingNextRunAt = !schedule.nextRunAt;
27542
+ if (!schedule.enabled) {
27151
27543
  schedule.nextRunAt = void 0;
27544
+ } else if (cadenceChanged || enabledFromDisabled || missingNextRunAt) {
27545
+ schedule.nextRunAt = this.computeNextRun(schedule.cronExpression);
27152
27546
  }
27153
27547
  schedule.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
27154
27548
  await this.persistSchedule(schedule);
@@ -28918,7 +29312,7 @@ var init_logger = __esm({
28918
29312
  });
28919
29313
 
28920
29314
  // ../core/src/store.ts
28921
- import { EventEmitter as EventEmitter11 } from "node:events";
29315
+ import { EventEmitter as EventEmitter12 } from "node:events";
28922
29316
  import { randomUUID as randomUUID6 } from "node:crypto";
28923
29317
  import { mkdir as mkdir7, readdir as readdir5, readFile as readFile8, writeFile as writeFile6, rename as rename4, unlink as unlink3 } from "node:fs/promises";
28924
29318
  import { join as join15 } from "node:path";
@@ -28974,6 +29368,7 @@ var init_store = __esm({
28974
29368
  init_plugin_store();
28975
29369
  init_roadmap_store();
28976
29370
  init_insight_store();
29371
+ init_todo_store();
28977
29372
  init_migration();
28978
29373
  init_central_core();
28979
29374
  init_task_merge();
@@ -28997,7 +29392,7 @@ var init_store = __esm({
28997
29392
  this.dependentIds = dependentIds;
28998
29393
  }
28999
29394
  };
29000
- TaskStore = class _TaskStore extends EventEmitter11 {
29395
+ TaskStore = class _TaskStore extends EventEmitter12 {
29001
29396
  constructor(rootDir, globalSettingsDir) {
29002
29397
  super();
29003
29398
  this.rootDir = rootDir;
@@ -29087,6 +29482,8 @@ var init_store = __esm({
29087
29482
  roadmapStore = null;
29088
29483
  /** Cached InsightStore instance */
29089
29484
  insightStore = null;
29485
+ /** Cached TodoStore instance */
29486
+ todoStore = null;
29090
29487
  /**
29091
29488
  * Get the SQLite database, initializing it on first access.
29092
29489
  * Also performs auto-migration from legacy file-based storage if needed.
@@ -33435,6 +33832,16 @@ ${notificationsSection}
33435
33832
  }
33436
33833
  return this.insightStore;
33437
33834
  }
33835
+ /**
33836
+ * Get the TodoStore instance for project-scoped todo list operations.
33837
+ * Lazily initializes the TodoStore on first access.
33838
+ */
33839
+ getTodoStore() {
33840
+ if (!this.todoStore) {
33841
+ this.todoStore = new TodoStore(this.db);
33842
+ }
33843
+ return this.todoStore;
33844
+ }
33438
33845
  // ── Backward Compatibility (Multi-Project Support) ────────────────────────
33439
33846
  };
33440
33847
  }
@@ -33728,6 +34135,20 @@ function updatePiExtensionDisabledIds(cwd, disabledIds, home, extraKnownIds = []
33728
34135
  `);
33729
34136
  return discoverPiExtensions(cwd, home);
33730
34137
  }
34138
+ function isExternalClaudeCliPath(p, vendoredPath) {
34139
+ if (vendoredPath && p === vendoredPath) return false;
34140
+ return /(^|[/\\])pi-claude-cli([/\\]|$)/i.test(p);
34141
+ }
34142
+ function reconcileClaudeCliPaths(paths, vendoredPath) {
34143
+ if (!vendoredPath) {
34144
+ return [...paths];
34145
+ }
34146
+ const filtered = paths.filter((p) => !isExternalClaudeCliPath(p, vendoredPath));
34147
+ if (!filtered.includes(vendoredPath)) {
34148
+ return [vendoredPath, ...filtered];
34149
+ }
34150
+ return filtered;
34151
+ }
33731
34152
  function formatPiExtensionSource(source, extensionPath, cwd, home) {
33732
34153
  const homeDir = getHomeDir(home);
33733
34154
  const projectRoot = resolvePiExtensionProjectRoot(cwd);
@@ -33943,7 +34364,7 @@ var routine_store_exports = {};
33943
34364
  __export(routine_store_exports, {
33944
34365
  RoutineStore: () => RoutineStore
33945
34366
  });
33946
- import { EventEmitter as EventEmitter12 } from "node:events";
34367
+ import { EventEmitter as EventEmitter13 } from "node:events";
33947
34368
  import { randomUUID as randomUUID7 } from "node:crypto";
33948
34369
  var import_cron_parser2, CRON_TIMEZONE2, RoutineStore;
33949
34370
  var init_routine_store = __esm({
@@ -33953,7 +34374,7 @@ var init_routine_store = __esm({
33953
34374
  init_db();
33954
34375
  init_routine();
33955
34376
  CRON_TIMEZONE2 = "UTC";
33956
- RoutineStore = class _RoutineStore extends EventEmitter12 {
34377
+ RoutineStore = class _RoutineStore extends EventEmitter13 {
33957
34378
  constructor(rootDir) {
33958
34379
  super();
33959
34380
  this.rootDir = rootDir;
@@ -34361,7 +34782,7 @@ var init_routine_store = __esm({
34361
34782
  import { basename as basename6, dirname as dirname5, extname, isAbsolute as isAbsolute5, resolve as resolve7 } from "node:path";
34362
34783
  import { copyFile, rm } from "node:fs/promises";
34363
34784
  import { pathToFileURL } from "node:url";
34364
- import { EventEmitter as EventEmitter13 } from "node:events";
34785
+ import { EventEmitter as EventEmitter14 } from "node:events";
34365
34786
  var MINIMUM_FUSION_VERSION, log, moduleImportVersion, PluginLoader;
34366
34787
  var init_plugin_loader = __esm({
34367
34788
  "../core/src/plugin-loader.ts"() {
@@ -34371,7 +34792,7 @@ var init_plugin_loader = __esm({
34371
34792
  MINIMUM_FUSION_VERSION = "0.1.0";
34372
34793
  log = createLogger("plugin-loader");
34373
34794
  moduleImportVersion = 0;
34374
- PluginLoader = class extends EventEmitter13 {
34795
+ PluginLoader = class extends EventEmitter14 {
34375
34796
  constructor(options) {
34376
34797
  super();
34377
34798
  this.options = options;
@@ -37903,15 +38324,15 @@ var require_fd_slicer = __commonJS({
37903
38324
  var Writable = stream.Writable;
37904
38325
  var PassThrough = stream.PassThrough;
37905
38326
  var Pend = require_pend();
37906
- var EventEmitter24 = __require("events").EventEmitter;
38327
+ var EventEmitter25 = __require("events").EventEmitter;
37907
38328
  exports.createFromBuffer = createFromBuffer;
37908
38329
  exports.createFromFd = createFromFd;
37909
38330
  exports.BufferSlicer = BufferSlicer;
37910
38331
  exports.FdSlicer = FdSlicer;
37911
- util.inherits(FdSlicer, EventEmitter24);
38332
+ util.inherits(FdSlicer, EventEmitter25);
37912
38333
  function FdSlicer(fd, options) {
37913
38334
  options = options || {};
37914
- EventEmitter24.call(this);
38335
+ EventEmitter25.call(this);
37915
38336
  this.fd = fd;
37916
38337
  this.pend = new Pend();
37917
38338
  this.pend.max = 1;
@@ -38055,9 +38476,9 @@ var require_fd_slicer = __commonJS({
38055
38476
  this.destroyed = true;
38056
38477
  this.context.unref();
38057
38478
  };
38058
- util.inherits(BufferSlicer, EventEmitter24);
38479
+ util.inherits(BufferSlicer, EventEmitter25);
38059
38480
  function BufferSlicer(buffer, options) {
38060
- EventEmitter24.call(this);
38481
+ EventEmitter25.call(this);
38061
38482
  options = options || {};
38062
38483
  this.refCount = 0;
38063
38484
  this.buffer = buffer;
@@ -38469,7 +38890,7 @@ var require_yauzl = __commonJS({
38469
38890
  var fd_slicer = require_fd_slicer();
38470
38891
  var crc32 = require_buffer_crc32();
38471
38892
  var util = __require("util");
38472
- var EventEmitter24 = __require("events").EventEmitter;
38893
+ var EventEmitter25 = __require("events").EventEmitter;
38473
38894
  var Transform = __require("stream").Transform;
38474
38895
  var PassThrough = __require("stream").PassThrough;
38475
38896
  var Writable = __require("stream").Writable;
@@ -38601,10 +39022,10 @@ var require_yauzl = __commonJS({
38601
39022
  callback(new Error("end of central directory record signature not found"));
38602
39023
  });
38603
39024
  }
38604
- util.inherits(ZipFile, EventEmitter24);
39025
+ util.inherits(ZipFile, EventEmitter25);
38605
39026
  function ZipFile(reader, centralDirectoryOffset, fileSize, entryCount, comment, autoClose, lazyEntries, decodeStrings, validateEntrySizes, strictFileNames) {
38606
39027
  var self = this;
38607
- EventEmitter24.call(self);
39028
+ EventEmitter25.call(self);
38608
39029
  self.reader = reader;
38609
39030
  self.reader.on("error", function(err) {
38610
39031
  emitError(self, err);
@@ -38965,9 +39386,9 @@ var require_yauzl = __commonJS({
38965
39386
  }
38966
39387
  cb();
38967
39388
  };
38968
- util.inherits(RandomAccessReader, EventEmitter24);
39389
+ util.inherits(RandomAccessReader, EventEmitter25);
38969
39390
  function RandomAccessReader() {
38970
- EventEmitter24.call(this);
39391
+ EventEmitter25.call(this);
38971
39392
  this.refCount = 0;
38972
39393
  }
38973
39394
  RandomAccessReader.prototype.ref = function() {
@@ -47259,14 +47680,14 @@ var init_agent_companies_exporter = __esm({
47259
47680
  });
47260
47681
 
47261
47682
  // ../core/src/chat-store.ts
47262
- import { EventEmitter as EventEmitter14 } from "node:events";
47683
+ import { EventEmitter as EventEmitter15 } from "node:events";
47263
47684
  import { randomUUID as randomUUID8 } from "node:crypto";
47264
47685
  var ChatStore;
47265
47686
  var init_chat_store = __esm({
47266
47687
  "../core/src/chat-store.ts"() {
47267
47688
  "use strict";
47268
47689
  init_db();
47269
- ChatStore = class extends EventEmitter14 {
47690
+ ChatStore = class extends EventEmitter15 {
47270
47691
  constructor(fusionDir, db) {
47271
47692
  super();
47272
47693
  this.fusionDir = fusionDir;
@@ -47725,6 +48146,7 @@ __export(src_exports, {
47725
48146
  THEME_MODES: () => THEME_MODES,
47726
48147
  THINKING_LEVELS: () => THINKING_LEVELS,
47727
48148
  TaskStore: () => TaskStore,
48149
+ TodoStore: () => TodoStore,
47728
48150
  VALIDATOR_RUN_STATUSES: () => VALIDATOR_RUN_STATUSES,
47729
48151
  VALID_TRANSITIONS: () => VALID_TRANSITIONS,
47730
48152
  ValidationError: () => ValidationError,
@@ -47882,6 +48304,7 @@ __export(src_exports, {
47882
48304
  readProjectMemoryFileContent: () => readProjectMemoryFileContent,
47883
48305
  readProjectMemoryWithBackend: () => readProjectMemoryWithBackend,
47884
48306
  readWorkingMemory: () => readWorkingMemory,
48307
+ reconcileClaudeCliPaths: () => reconcileClaudeCliPaths,
47885
48308
  refreshQmdProjectMemoryIndex: () => refreshQmdProjectMemoryIndex,
47886
48309
  registerMemoryBackend: () => registerMemoryBackend,
47887
48310
  renderMemoryAuditMarkdown: () => renderMemoryAuditMarkdown,
@@ -47982,6 +48405,7 @@ var init_src = __esm({
47982
48405
  init_memory_backend();
47983
48406
  init_memory_dreams();
47984
48407
  init_insight_store();
48408
+ init_todo_store();
47985
48409
  init_agent_companies_parser();
47986
48410
  init_agent_companies_exporter();
47987
48411
  init_chat_store();
@@ -49302,6 +49726,10 @@ function resolveSessionSkills(context) {
49302
49726
  function createSkillsOverrideFromSelection(selection, options = {}) {
49303
49727
  const { allowedSkillPaths, excludedSkillPaths, filterActive } = selection;
49304
49728
  const { requestedSkillNames, sessionPurpose } = options;
49729
+ const isBuiltInFallbackRequest = (name) => {
49730
+ const purposeUsesRoleFallback = sessionPurpose === "triage" || sessionPurpose === "executor" || sessionPurpose === "reviewer" || sessionPurpose === "merger";
49731
+ return purposeUsesRoleFallback && requestedSkillNames?.length === 1 && name.toLowerCase() === "fusion";
49732
+ };
49305
49733
  return (base) => {
49306
49734
  if (!filterActive) {
49307
49735
  return base;
@@ -49348,7 +49776,7 @@ function createSkillsOverrideFromSelection(selection, options = {}) {
49348
49776
  if (requestedSkillNames) {
49349
49777
  const discoveredNamesLower = new Set(base.skills.map((s) => s.name.toLowerCase()));
49350
49778
  for (const requestedName of requestedSkillNames) {
49351
- if (!discoveredNamesLower.has(requestedName.toLowerCase())) {
49779
+ if (!discoveredNamesLower.has(requestedName.toLowerCase()) && !isBuiltInFallbackRequest(requestedName)) {
49352
49780
  const purpose2 = sessionPurpose ? ` [${sessionPurpose}]` : "";
49353
49781
  newDiagnostics.push({
49354
49782
  type: "warning",
@@ -49539,18 +49967,28 @@ __export(pi_exports, {
49539
49967
  compactSessionContext: () => compactSessionContext,
49540
49968
  createFnAgent: () => createFnAgent2,
49541
49969
  describeModel: () => describeModel,
49970
+ getHostExtensionPaths: () => getHostExtensionPaths,
49542
49971
  promptWithFallback: () => promptWithFallback,
49972
+ setHostExtensionPaths: () => setHostExtensionPaths,
49543
49973
  wrapToolsWithBoundary: () => wrapToolsWithBoundary
49544
49974
  });
49545
49975
  import { existsSync as existsSync20, readFileSync as readFileSync6 } from "node:fs";
49546
49976
  import { exec } from "node:child_process";
49547
49977
  import { promisify as promisify2 } from "node:util";
49978
+ import { createRequire } from "node:module";
49548
49979
  import { basename as basename7, dirname as dirname7, join as join24, relative as relative3, isAbsolute as isAbsolute6, resolve as resolve10 } from "node:path";
49549
49980
  import {
49550
49981
  createAgentSession,
49982
+ createBashTool,
49551
49983
  createCodingTools,
49984
+ createEditTool,
49552
49985
  createExtensionRuntime,
49986
+ createFindTool,
49987
+ createGrepTool,
49988
+ createLsTool,
49553
49989
  createReadOnlyTools,
49990
+ createReadTool,
49991
+ createWriteTool,
49554
49992
  DefaultResourceLoader,
49555
49993
  DefaultPackageManager,
49556
49994
  discoverAndLoadExtensions,
@@ -49558,6 +49996,12 @@ import {
49558
49996
  SessionManager,
49559
49997
  SettingsManager
49560
49998
  } from "@mariozechner/pi-coding-agent";
49999
+ function setHostExtensionPaths(paths) {
50000
+ hostExtensionPaths = [...paths];
50001
+ }
50002
+ function getHostExtensionPaths() {
50003
+ return hostExtensionPaths;
50004
+ }
49561
50005
  function getSessionStateError(session) {
49562
50006
  const state = session.state;
49563
50007
  const error = state?.errorMessage ?? state?.error;
@@ -49958,6 +50402,21 @@ function getPackageManagerAgentDir() {
49958
50402
  }
49959
50403
  return existsSync20(fusionAgentDir) ? fusionAgentDir : legacyAgentDir;
49960
50404
  }
50405
+ function resolveVendoredClaudeCliEntry() {
50406
+ try {
50407
+ const require_ = createRequire(import.meta.url);
50408
+ const pkgJsonPath = require_.resolve("@fusion/pi-claude-cli/package.json");
50409
+ const pkgJson = JSON.parse(readFileSync6(pkgJsonPath, "utf-8"));
50410
+ const extensions = pkgJson.pi?.extensions;
50411
+ if (!Array.isArray(extensions) || extensions.length === 0) return null;
50412
+ const entry = extensions[0];
50413
+ if (typeof entry !== "string" || entry.length === 0) return null;
50414
+ const path = resolve10(dirname7(pkgJsonPath), entry);
50415
+ return existsSync20(path) ? path : null;
50416
+ } catch {
50417
+ return null;
50418
+ }
50419
+ }
49961
50420
  async function registerExtensionProviders(cwd, modelRegistry) {
49962
50421
  try {
49963
50422
  const agentDir = getPackageManagerAgentDir();
@@ -49968,8 +50427,13 @@ async function registerExtensionProviders(cwd, modelRegistry) {
49968
50427
  });
49969
50428
  const resolvedPaths = await packageManager.resolve();
49970
50429
  const packageExtensionPaths = resolvedPaths.extensions.filter((resource) => resource.enabled).map((resource) => resource.path);
49971
- const extensionsResult = await discoverAndLoadExtensions(
50430
+ const vendoredClaudeCli = resolveVendoredClaudeCliEntry();
50431
+ const reconciledPaths = reconcileClaudeCliPaths(
49972
50432
  [...getEnabledPiExtensionPaths(cwd), ...packageExtensionPaths],
50433
+ vendoredClaudeCli
50434
+ );
50435
+ const extensionsResult = await discoverAndLoadExtensions(
50436
+ reconciledPaths,
49973
50437
  cwd,
49974
50438
  join24(resolvePiExtensionProjectRoot(cwd), ".fusion", "disabled-auto-extension-discovery")
49975
50439
  );
@@ -50104,7 +50568,22 @@ async function createFnAgent2(options) {
50104
50568
  const authStorage = createFusionAuthStorage();
50105
50569
  const modelRegistry = ModelRegistry.create(authStorage, getModelRegistryModelsPath());
50106
50570
  await registerExtensionProviders(options.cwd, modelRegistry);
50107
- const tools = options.tools === "readonly" ? createReadOnlyTools(options.cwd) : createCodingTools(options.cwd);
50571
+ const tools = options.tools === "readonly" ? [
50572
+ createReadTool(options.cwd),
50573
+ createGrepTool(options.cwd),
50574
+ createFindTool(options.cwd),
50575
+ createLsTool(options.cwd)
50576
+ ] : [
50577
+ createReadTool(options.cwd),
50578
+ createBashTool(options.cwd),
50579
+ createEditTool(options.cwd),
50580
+ createWriteTool(options.cwd),
50581
+ createGrepTool(options.cwd),
50582
+ createFindTool(options.cwd),
50583
+ createLsTool(options.cwd)
50584
+ ];
50585
+ void createCodingTools;
50586
+ void createReadOnlyTools;
50108
50587
  const worktreePath = options.cwd;
50109
50588
  const projectRoot = getProjectRootFromWorktree(worktreePath);
50110
50589
  if (projectRoot) {
@@ -50156,6 +50635,10 @@ async function createFnAgent2(options) {
50156
50635
  settingsManager,
50157
50636
  systemPromptOverride: () => options.systemPrompt,
50158
50637
  appendSystemPromptOverride: () => [],
50638
+ // Inject host-supplied extension paths (e.g. cli's own `@runfusion/fusion`
50639
+ // extension that registers `fn_*` tools) so they're loaded inside every
50640
+ // agent session, including chat sessions that don't pass `customTools`.
50641
+ ...hostExtensionPaths.length > 0 ? { additionalExtensionPaths: [...hostExtensionPaths] } : {},
50159
50642
  ...skillsOverrideFn ? { skillsOverride: skillsOverrideFn } : {}
50160
50643
  });
50161
50644
  await resourceLoader.reload();
@@ -50332,7 +50815,7 @@ async function createFnAgent2(options) {
50332
50815
  });
50333
50816
  return { session: promptableSession, sessionFile: promptableSession.sessionFile };
50334
50817
  }
50335
- var execAsync, FN_MEMORY_APPEND_TOOL_NAME, COMPACTION_FALLBACK_INSTRUCTIONS, MAX_COMPACTED_PROMPT_MEMORY_CHARS;
50818
+ var execAsync, hostExtensionPaths, FN_MEMORY_APPEND_TOOL_NAME, COMPACTION_FALLBACK_INSTRUCTIONS, MAX_COMPACTED_PROMPT_MEMORY_CHARS;
50336
50819
  var init_pi = __esm({
50337
50820
  "../engine/src/pi.ts"() {
50338
50821
  "use strict";
@@ -50342,6 +50825,7 @@ var init_pi = __esm({
50342
50825
  init_auth_storage();
50343
50826
  init_logger2();
50344
50827
  execAsync = promisify2(exec);
50828
+ hostExtensionPaths = [];
50345
50829
  FN_MEMORY_APPEND_TOOL_NAME = "fn_memory_append";
50346
50830
  COMPACTION_FALLBACK_INSTRUCTIONS = [
50347
50831
  "Summarize all completed steps concisely.",
@@ -50676,10 +51160,10 @@ var init_session_skill_context = __esm({
50676
51160
  "../engine/src/session-skill-context.ts"() {
50677
51161
  "use strict";
50678
51162
  ROLE_FALLBACK_SKILLS = {
50679
- triage: ["triage"],
50680
- executor: ["executor"],
50681
- reviewer: ["reviewer"],
50682
- merger: ["merger"]
51163
+ triage: ["fusion"],
51164
+ executor: ["fusion"],
51165
+ reviewer: ["fusion"],
51166
+ merger: ["fusion"]
50683
51167
  };
50684
51168
  SKILL_DIAGNOSTIC_MESSAGES = {
50685
51169
  missing: (skillName) => `skill selection: requested skill "${skillName}" not found in discovered skills`,
@@ -51711,7 +52195,7 @@ ${isRevision ? "1. Review the existing specification and user feedback carefully
51711
52195
 
51712
52196
  Use the write tool to write the specification file.${commandsSection}${memorySection}${attachmentsSection}${userCommentsSection}`;
51713
52197
  }
51714
- var TRIAGE_SYSTEM_PROMPT, TriageProcessor, IMAGE_MIME_TYPES, TEXT_INLINE_LIMIT;
52198
+ var TRIAGE_SYSTEM_PROMPT, FAST_TRIAGE_SYSTEM_PROMPT, TriageProcessor, IMAGE_MIME_TYPES, TEXT_INLINE_LIMIT;
51715
52199
  var init_triage = __esm({
51716
52200
  "../engine/src/triage.ts"() {
51717
52201
  "use strict";
@@ -51966,6 +52450,148 @@ Use this exact checklist (keep it verbatim \u2014 do not expand or reorder):
51966
52450
  \`\`\`
51967
52451
 
51968
52452
  Only inject this section when the task genuinely touches frontend UI. Omit it for backend-only, config-only, or documentation-only tasks.`;
52453
+ FAST_TRIAGE_SYSTEM_PROMPT = `You are a task specification agent for "fn", an AI-orchestrated task board. This task is running in **fast mode** \u2014 produce a lean, executable PROMPT.md without heavyweight review scoring or subtask analysis.
52454
+
52455
+ Your job: turn a rough task description into a focused PROMPT.md another agent can execute autonomously.
52456
+
52457
+ ## What you produce
52458
+ Write a complete PROMPT.md specification to the given path using the write tool.
52459
+
52460
+ ## PROMPT.md Format
52461
+
52462
+ Follow this structure exactly:
52463
+
52464
+ \`\`\`markdown
52465
+ # Task: {ID} - {Name}
52466
+
52467
+ **Created:** {YYYY-MM-DD}
52468
+ **Size:** {S | M}
52469
+
52470
+ ## Mission
52471
+
52472
+ {One paragraph: what to build and why it matters}
52473
+
52474
+ ## Dependencies
52475
+
52476
+ - **None**
52477
+ {OR}
52478
+ - **Task:** {ID} ({what must be complete first})
52479
+
52480
+ ## Context to Read First
52481
+
52482
+ {List the minimal, specific files needed for implementation}
52483
+
52484
+ ## File Scope
52485
+
52486
+ {List exact files/directories expected to change}
52487
+
52488
+ - \`path/to/file.ext\`
52489
+ - \`path/to/directory/*\`
52490
+
52491
+ ## Steps
52492
+
52493
+ ### Step 0: Preflight
52494
+
52495
+ - [ ] Required files and paths exist
52496
+ - [ ] Dependencies satisfied
52497
+
52498
+ ### Step 1: {Implementation step name}
52499
+
52500
+ - [ ] {Specific, verifiable outcome}
52501
+ - [ ] {Specific, verifiable outcome}
52502
+ - [ ] Run targeted tests for changed files
52503
+
52504
+ **Artifacts:**
52505
+ - \`path/to/file\` (new | modified)
52506
+
52507
+ ### Step {N-1}: Testing & Verification
52508
+
52509
+ > ZERO test failures allowed. Full test suite as quality gate.
52510
+ > If keeping lint/tests/build/typecheck green requires edits outside the initial File Scope, make those fixes as part of this task.
52511
+
52512
+ - [ ] Run lint check (\`pnpm lint\`)
52513
+ - [ ] Run full test suite
52514
+ - [ ] Run project typecheck if available
52515
+ - [ ] Build passes
52516
+
52517
+ ### Step {N}: Documentation & Delivery
52518
+
52519
+ - [ ] Update relevant documentation
52520
+ - [ ] Save documentation deliverables as task documents via \`fn_task_document_write\` (key="docs", content=...)
52521
+ - [ ] Create out-of-scope follow-up tasks via \`fn_task_create\` when needed
52522
+
52523
+ ## Documentation Requirements
52524
+
52525
+ **Must Update:**
52526
+ - \`path/to/doc.md\` \u2014 {what to add/change}
52527
+
52528
+ **Check If Affected:**
52529
+ - \`path/to/doc.md\` \u2014 {update if relevant}
52530
+
52531
+ ## Completion Criteria
52532
+
52533
+ - [ ] All steps complete
52534
+ - [ ] Lint passing
52535
+ - [ ] All tests passing
52536
+ - [ ] Typecheck passing (if available)
52537
+ - [ ] Documentation updated
52538
+
52539
+ ## Git Commit Convention
52540
+
52541
+ Commits at step boundaries. All commits include the task ID:
52542
+
52543
+ - **Step completion:** \`feat({ID}): complete Step N \u2014 description\`
52544
+ - **Bug fixes:** \`fix({ID}): description\`
52545
+ - **Tests:** \`test({ID}): description\`
52546
+
52547
+ ## Do NOT
52548
+
52549
+ - Expand task scope
52550
+ - Skip tests
52551
+ - Refuse necessary fixes just because they touch files outside the initial File Scope
52552
+ - Commit without the task ID prefix
52553
+ - Remove, delete, or gut modules, settings, interfaces, exports, or test files outside the File Scope
52554
+ - Remove features as "cleanup" \u2014 if something seems unused, create a task via \`fn_task_create\`
52555
+
52556
+ ## Changeset Requirements
52557
+
52558
+ If this task REMOVES existing functionality (deleting modules, settings, API endpoints, or exports), a changeset file is REQUIRED:
52559
+ - Create \`.changeset/{task-id}-removal.md\` explaining what was removed and why
52560
+ - This is mandatory for any net-negative change (more deletions than additions to existing files)
52561
+ \`\`\`
52562
+
52563
+ ## Testing requirements
52564
+ - Require real automated tests with assertions that run in the project's test runner
52565
+ - Typecheck/build/manual checks are not tests and cannot replace tests
52566
+ - Include targeted tests in implementation steps and full quality-gate runs in final verification
52567
+
52568
+ ## Duplicate check
52569
+ Before writing a spec, call \`fn_task_list\` to find existing active tasks.
52570
+ If an existing task already covers the same work, do NOT write a PROMPT.md. Instead write exactly:
52571
+ \`DUPLICATE: {existing-task-id}\`
52572
+
52573
+ ## Dependency awareness
52574
+ When adding a dependency in \`## Dependencies\`, first call \`fn_task_get\` for that task and read its PROMPT.md.
52575
+ Use that context to align file paths, APIs, assumptions, and completion expectations. If the dependency has no PROMPT.md yet, note that explicitly.
52576
+
52577
+ ## Guidelines
52578
+ - Read relevant source files before writing the spec
52579
+ - Be specific: reference concrete files, modules, and commands from this repo
52580
+ - Keep steps outcome-focused with 2\u20134 checkboxes per step
52581
+ - Always include Testing & Verification and Documentation & Delivery steps
52582
+ - Keep fast-mode scope lean and executable; do not add heavyweight review scoring or subtask-analysis sections
52583
+
52584
+ ## Project commands
52585
+ When the user prompt includes explicit test/build commands, use those exact commands in the generated spec.
52586
+
52587
+ ## Spec Review
52588
+
52589
+ After writing the PROMPT.md, call \`fn_review_spec()\` to confirm the spec.
52590
+
52591
+ Fast-mode specs are auto-approved \u2014 the review tool will return APPROVE immediately without spawning an independent reviewer. You do NOT need to wait for or iterate on review feedback.
52592
+
52593
+ ## Output
52594
+ Write the PROMPT.md directly using the write tool, then call \`fn_review_spec()\` to confirm.`;
51969
52595
  TriageProcessor = class _TriageProcessor {
51970
52596
  /**
51971
52597
  * @param store — Task store instance (also used to listen for `settings:updated` events)
@@ -52234,6 +52860,7 @@ Only inject this section when the task genuinely touches frontend UI. Omit it fo
52234
52860
  const detail = await this.store.getTask(task.id);
52235
52861
  const settings = await this.store.getSettings();
52236
52862
  const promptPath = `.fusion/tasks/${task.id}/PROMPT.md`;
52863
+ const isFast = task.executionMode === "fast";
52237
52864
  const agentWork = async () => {
52238
52865
  await this.store.updateTask(task.id, { status: "specifying" });
52239
52866
  const stuckDetector = this.options.stuckTaskDetector;
@@ -52279,7 +52906,8 @@ Only inject this section when the task genuinely touches frontend UI. Omit it fo
52279
52906
  checkpointRef,
52280
52907
  specReviewVerdictRef,
52281
52908
  approvedCommentFingerprintRef,
52282
- settings
52909
+ settings,
52910
+ isFast
52283
52911
  )
52284
52912
  ];
52285
52913
  let triageInstructions = "";
@@ -52297,8 +52925,9 @@ Only inject this section when the task genuinely touches frontend UI. Omit it fo
52297
52925
  triageLog.warn(`${task.id}: failed to resolve triage agent instructions, continuing with defaults: ${msg}`);
52298
52926
  }
52299
52927
  }
52928
+ triageLog.log(`${task.id}: specifying in ${isFast ? "fast" : "standard"} mode`);
52300
52929
  const triageSystemPrompt = buildSystemPromptWithInstructions(
52301
- resolveAgentPrompt("triage", settings.agentPrompts) || TRIAGE_SYSTEM_PROMPT,
52930
+ resolveAgentPrompt("triage", settings.agentPrompts) || (isFast ? FAST_TRIAGE_SYSTEM_PROMPT : TRIAGE_SYSTEM_PROMPT),
52302
52931
  triageInstructions
52303
52932
  );
52304
52933
  const skillContext = await buildSessionSkillContext({
@@ -52416,6 +53045,26 @@ Only inject this section when the task genuinely touches frontend UI. Omit it fo
52416
53045
  }
52417
53046
  return;
52418
53047
  }
53048
+ const MAX_REVIEW_REMINDERS = 2;
53049
+ let reviewReminders = 0;
53050
+ while (specReviewVerdictRef.current !== "APPROVE" && !this.pauseAborted.has(task.id) && !this.stuckAborted.has(task.id) && createdSubtasksRef.current.length === 0 && reviewReminders < MAX_REVIEW_REMINDERS) {
53051
+ reviewReminders += 1;
53052
+ const verdictDesc = specReviewVerdictRef.current === null ? "fn_review_spec was never called" : `verdict was ${specReviewVerdictRef.current}`;
53053
+ triageLog.warn(
53054
+ `${task.id} primary planning model returned without APPROVE (${verdictDesc}) \u2014 reminder ${reviewReminders}/${MAX_REVIEW_REMINDERS}`
53055
+ );
53056
+ await this.store.logEntry(
53057
+ task.id,
53058
+ `Primary planning model returned without APPROVE (${verdictDesc}) \u2014 reminder ${reviewReminders}/${MAX_REVIEW_REMINDERS}`
53059
+ );
53060
+ const reminder = specReviewVerdictRef.current === null ? "You wrote the PROMPT.md but did not call `fn_review_spec()`. Call `fn_review_spec()` now to validate the spec. Do not stop until the verdict is APPROVE." : `Spec review verdict was ${specReviewVerdictRef.current}. Address the feedback, rewrite the PROMPT.md as needed, and call \`fn_review_spec()\` again. Do not stop until the verdict is APPROVE.`;
53061
+ stuckDetector?.recordActivity(task.id);
53062
+ await promptWithFallback(session, reminder);
53063
+ checkSessionError(session);
53064
+ if (this.pauseAborted.has(task.id) || this.stuckAborted.has(task.id)) {
53065
+ break;
53066
+ }
53067
+ }
52419
53068
  const planningFallbackProvider = settings.planningFallbackProvider;
52420
53069
  const planningFallbackModelId = settings.planningFallbackModelId;
52421
53070
  const canRetryWithPlanningFallback = specReviewVerdictRef.current !== "APPROVE" && planningFallbackProvider && planningFallbackModelId && modelDesc !== `${planningFallbackProvider}/${planningFallbackModelId}`;
@@ -52831,7 +53480,7 @@ Remove or replace these ids and call fn_task_create again.`
52831
53480
  * using `session.navigateTree()`. Returns a re-prompt instructing the agent
52832
53481
  * to take a fundamentally different approach.
52833
53482
  */
52834
- createReviewSpecTool(taskId, promptPath, sessionRef, checkpointRef, specReviewVerdictRef, approvedCommentFingerprintRef, _settings) {
53483
+ createReviewSpecTool(taskId, promptPath, sessionRef, checkpointRef, specReviewVerdictRef, approvedCommentFingerprintRef, _settings, skipSpecReview) {
52835
53484
  const store = this.store;
52836
53485
  const rootDir = this.rootDir;
52837
53486
  const options = this.options;
@@ -52868,11 +53517,18 @@ Remove or replace these ids and call fn_task_create again.`
52868
53517
  details: {}
52869
53518
  };
52870
53519
  }
52871
- const currentSettings = await store.getSettings();
52872
53520
  const currentDetail = await store.getTask(taskId);
52873
53521
  const currentUserComments = (currentDetail.comments || []).filter(
52874
53522
  (c) => c.author === "user"
52875
53523
  );
53524
+ if (skipSpecReview) {
53525
+ specReviewVerdictRef.current = "APPROVE";
53526
+ approvedCommentFingerprintRef.current = currentUserComments.length > 0 ? computeUserCommentFingerprint(currentUserComments) : "";
53527
+ triageLog.log(`${taskId}: spec review auto-approved (fast mode)`);
53528
+ await store.logEntry(taskId, "Spec review: APPROVE (auto, fast mode)");
53529
+ return { content: [{ type: "text", text: "APPROVE" }], details: {} };
53530
+ }
53531
+ const currentSettings = await store.getSettings();
52876
53532
  const result = await reviewStep(
52877
53533
  rootDir,
52878
53534
  taskId,
@@ -53316,9 +53972,10 @@ function getDependencySyncCommand(rootDir) {
53316
53972
  }
53317
53973
  return null;
53318
53974
  }
53319
- async function syncDependenciesForMerge(store, rootDir, taskId) {
53975
+ async function syncDependenciesForMerge(store, rootDir, taskId, signal) {
53320
53976
  const installCommand = getDependencySyncCommand(rootDir);
53321
53977
  if (!installCommand) return;
53978
+ throwIfAborted(signal, taskId);
53322
53979
  mergerLog.log(`${taskId}: syncing dependencies before merge build verification`);
53323
53980
  await store.logEntry(taskId, `Syncing dependencies before merge build verification: ${installCommand}`);
53324
53981
  try {
@@ -53328,7 +53985,9 @@ async function syncDependenciesForMerge(store, rootDir, taskId) {
53328
53985
  maxBuffer: 10 * 1024 * 1024,
53329
53986
  timeout: 3e5
53330
53987
  });
53988
+ throwIfAborted(signal, taskId);
53331
53989
  } catch (error) {
53990
+ throwIfAborted(signal, taskId);
53332
53991
  const details = error?.stderr || error?.stdout || error?.message || String(error);
53333
53992
  throw new Error(`Dependency sync failed for ${taskId}: ${details}`.trim());
53334
53993
  }
@@ -53376,7 +54035,16 @@ function inferDefaultTestCommand(rootDir, explicitTestCommand, explicitBuildComm
53376
54035
  }
53377
54036
  return null;
53378
54037
  }
53379
- async function runDeterministicVerification(store, rootDir, taskId, testCommand, buildCommand, testSource, buildSource) {
54038
+ function throwIfAborted(signal, taskId) {
54039
+ if (!signal?.aborted) return;
54040
+ throw new MergeAbortedError(`Merge aborted for ${taskId}: engine shutdown requested`);
54041
+ }
54042
+ function rethrowIfMergeAborted(error) {
54043
+ if (error instanceof Error && error.name === "MergeAbortedError") {
54044
+ throw error;
54045
+ }
54046
+ }
54047
+ async function runDeterministicVerification(store, rootDir, taskId, testCommand, buildCommand, testSource, buildSource, signal) {
53380
54048
  const result = { allPassed: true };
53381
54049
  if (!testCommand && !buildCommand) {
53382
54050
  mergerLog.log(`${taskId}: no verification commands configured \u2014 skipping`);
@@ -53401,7 +54069,8 @@ async function runDeterministicVerification(store, rootDir, taskId, testCommand,
53401
54069
  rootDir,
53402
54070
  taskId,
53403
54071
  normalizedTestCommand,
53404
- "test"
54072
+ "test",
54073
+ signal
53405
54074
  );
53406
54075
  result.testResult = testResult;
53407
54076
  if (!testResult.success) {
@@ -53424,7 +54093,8 @@ async function runDeterministicVerification(store, rootDir, taskId, testCommand,
53424
54093
  rootDir,
53425
54094
  taskId,
53426
54095
  normalizedBuildCommand,
53427
- "build"
54096
+ "build",
54097
+ signal
53428
54098
  );
53429
54099
  result.buildResult = buildResult;
53430
54100
  if (!buildResult.success) {
@@ -53445,7 +54115,8 @@ async function runDeterministicVerification(store, rootDir, taskId, testCommand,
53445
54115
  await store.logEntry(taskId, "Deterministic merge verification passed");
53446
54116
  return result;
53447
54117
  }
53448
- async function runVerificationCommand(store, rootDir, taskId, command, type) {
54118
+ async function runVerificationCommand(store, rootDir, taskId, command, type, signal) {
54119
+ throwIfAborted(signal, taskId);
53449
54120
  mergerLog.log(`${taskId}: running ${type} command: ${command}`);
53450
54121
  await store.logEntry(taskId, `[verification] Running ${type} command: ${command}`);
53451
54122
  const result = {
@@ -53463,6 +54134,7 @@ async function runVerificationCommand(store, rootDir, taskId, command, type) {
53463
54134
  timeout: 3e5,
53464
54135
  maxBuffer: VERIFICATION_COMMAND_MAX_BUFFER
53465
54136
  });
54137
+ throwIfAborted(signal, taskId);
53466
54138
  result.stdout = stdout?.toString?.() || "";
53467
54139
  result.stderr = stderr?.toString?.() || "";
53468
54140
  result.exitCode = 0;
@@ -53472,6 +54144,7 @@ async function runVerificationCommand(store, rootDir, taskId, command, type) {
53472
54144
  await store.logEntry(taskId, `[timing] [verification] ${type} command succeeded (exit 0) in ${verificationDurationMs}ms`);
53473
54145
  return result;
53474
54146
  } catch (error) {
54147
+ throwIfAborted(signal, taskId);
53475
54148
  const verificationDurationMs = Date.now() - verificationStartedAt;
53476
54149
  result.stdout = error?.stdout?.toString?.() || "";
53477
54150
  result.stderr = error?.stderr?.toString?.() || "";
@@ -53520,6 +54193,7 @@ async function attemptInMergeVerificationFix(store, rootDir, taskId, failureCont
53520
54193
  } catch {
53521
54194
  }
53522
54195
  }
54196
+ throwIfAborted(options.signal, taskId);
53523
54197
  const { session } = await createResolvedAgentSession({
53524
54198
  sessionPurpose: "merger",
53525
54199
  pluginRunner: options.pluginRunner,
@@ -53571,12 +54245,14 @@ ${failureContext.output.slice(0, VERIFICATION_LOG_MAX_CHARS)}
53571
54245
  4. If the fix doesn't work, try a different approach
53572
54246
  5. Do NOT make any git commits`;
53573
54247
  await withRateLimitRetry(async () => {
54248
+ throwIfAborted(options.signal, taskId);
53574
54249
  await promptWithFallback(session, fixPrompt);
53575
54250
  }, {
53576
54251
  onRetry: (attempt, delayMs, error) => {
53577
54252
  const delaySec = Math.round(delayMs / 1e3);
53578
54253
  mergerLog.warn(`\u23F3 ${taskId} in-merge fix rate limited \u2014 retry ${attempt} in ${delaySec}s: ${error.message}`);
53579
- }
54254
+ },
54255
+ signal: options.signal
53580
54256
  });
53581
54257
  await store.logEntry(
53582
54258
  taskId,
@@ -53587,7 +54263,8 @@ ${failureContext.output.slice(0, VERIFICATION_LOG_MAX_CHARS)}
53587
54263
  rootDir,
53588
54264
  taskId,
53589
54265
  failureContext.command,
53590
- failureContext.type
54266
+ failureContext.type,
54267
+ options.signal
53591
54268
  );
53592
54269
  return reRunResult.success;
53593
54270
  } finally {
@@ -53595,6 +54272,7 @@ ${failureContext.output.slice(0, VERIFICATION_LOG_MAX_CHARS)}
53595
54272
  await session.dispose();
53596
54273
  }
53597
54274
  } catch (err) {
54275
+ rethrowIfMergeAborted(err);
53598
54276
  const errorMessage = err instanceof Error ? err.message : String(err);
53599
54277
  mergerLog.warn(`${taskId}: in-merge fix agent error: ${errorMessage}`);
53600
54278
  await store.logEntry(taskId, "In-merge verification fix agent encountered an error", errorMessage);
@@ -54058,6 +54736,7 @@ You are assisting with a paused \`git pull --rebase\`.
54058
54736
  agent: "merger",
54059
54737
  onAgentText: options?.onAgentText ? (_id, delta) => options.onAgentText?.(delta) : void 0
54060
54738
  });
54739
+ throwIfAborted(options?.signal, taskId);
54061
54740
  const { session } = await createResolvedAgentSession({
54062
54741
  sessionPurpose: "merger",
54063
54742
  pluginRunner: options?.pluginRunner,
@@ -54082,6 +54761,7 @@ You are assisting with a paused \`git pull --rebase\`.
54082
54761
  ].join("\n");
54083
54762
  try {
54084
54763
  await withRateLimitRetry(async () => {
54764
+ throwIfAborted(options?.signal, taskId);
54085
54765
  await promptWithFallback(session, prompt);
54086
54766
  checkSessionError(session);
54087
54767
  }, {
@@ -54089,7 +54769,8 @@ You are assisting with a paused \`git pull --rebase\`.
54089
54769
  mergerLog.warn(
54090
54770
  `${taskId}: rate limited while resolving rebase conflicts \u2014 retry ${attempt} in ${Math.round(delayMs / 1e3)}s: ${error.message}`
54091
54771
  );
54092
- }
54772
+ },
54773
+ signal: options?.signal
54093
54774
  });
54094
54775
  } finally {
54095
54776
  session.dispose();
@@ -54127,6 +54808,7 @@ async function resolveRebaseConflictSet(store, rootDir, taskId, settings, option
54127
54808
  async function pullWithRebaseAndResolveConflicts(store, rootDir, taskId, settings, remote, branch, options) {
54128
54809
  const pullCommand = `git pull --rebase ${quoteArg(remote)} ${quoteArg(branch)}`;
54129
54810
  try {
54811
+ throwIfAborted(options?.signal, taskId);
54130
54812
  await execAsync2(pullCommand, {
54131
54813
  cwd: rootDir,
54132
54814
  timeout: PULL_REBASE_TIMEOUT_MS,
@@ -54146,11 +54828,13 @@ async function pullWithRebaseAndResolveConflicts(store, rootDir, taskId, setting
54146
54828
  try {
54147
54829
  await resolveRebaseConflictSet(store, rootDir, taskId, settings, options);
54148
54830
  for (let attempt = 1; attempt <= 10; attempt++) {
54831
+ throwIfAborted(options?.signal, taskId);
54149
54832
  if (!isRebaseInProgress(rootDir)) {
54150
54833
  mergerLog.log(`${taskId}: rebase conflicts resolved`);
54151
54834
  return;
54152
54835
  }
54153
54836
  try {
54837
+ throwIfAborted(options?.signal, taskId);
54154
54838
  await execAsync2("GIT_EDITOR=true git rebase --continue", {
54155
54839
  cwd: rootDir,
54156
54840
  timeout: PULL_REBASE_TIMEOUT_MS,
@@ -54189,6 +54873,7 @@ async function pullWithRebaseAndResolveConflicts(store, rootDir, taskId, setting
54189
54873
  mergerLog.warn(`${taskId}: failed to abort rebase: ${getCommandErrorMessage(abortError)}`);
54190
54874
  }
54191
54875
  }
54876
+ rethrowIfMergeAborted(resolutionError);
54192
54877
  throw new Error(`unable to resolve rebase conflicts: ${getCommandErrorMessage(resolutionError)}`);
54193
54878
  }
54194
54879
  }
@@ -54196,8 +54881,10 @@ async function pullWithRebaseAndResolveConflicts(store, rootDir, taskId, setting
54196
54881
  async function pushToRemoteAfterMerge(store, rootDir, taskId, settings, options) {
54197
54882
  let target;
54198
54883
  try {
54884
+ throwIfAborted(options?.signal, taskId);
54199
54885
  target = parsePushRemoteTarget(rootDir, settings.pushRemote);
54200
54886
  } catch (error) {
54887
+ rethrowIfMergeAborted(error);
54201
54888
  const message = getCommandErrorMessage(error);
54202
54889
  mergerLog.error(`${taskId}: invalid push remote configuration: ${message}`);
54203
54890
  return { pushed: false, error: message };
@@ -54205,14 +54892,17 @@ async function pushToRemoteAfterMerge(store, rootDir, taskId, settings, options)
54205
54892
  const { remote, branch } = target;
54206
54893
  mergerLog.log(`${taskId}: push-after-merge enabled; syncing ${remote}/${branch}`);
54207
54894
  try {
54895
+ throwIfAborted(options?.signal, taskId);
54208
54896
  await pullWithRebaseAndResolveConflicts(store, rootDir, taskId, settings, remote, branch, options);
54209
54897
  } catch (error) {
54898
+ rethrowIfMergeAborted(error);
54210
54899
  const message = getCommandErrorMessage(error);
54211
54900
  mergerLog.error(`${taskId}: pull --rebase before push failed: ${message}`);
54212
54901
  return { pushed: false, error: message };
54213
54902
  }
54214
54903
  const pushCommand = `git push ${quoteArg(remote)} ${quoteArg(branch)}`;
54215
54904
  try {
54905
+ throwIfAborted(options?.signal, taskId);
54216
54906
  await execAsync2(pushCommand, {
54217
54907
  cwd: rootDir,
54218
54908
  timeout: PUSH_TIMEOUT_MS,
@@ -54229,7 +54919,9 @@ async function pushToRemoteAfterMerge(store, rootDir, taskId, settings, options)
54229
54919
  }
54230
54920
  mergerLog.log(`${taskId}: push rejected as non-fast-forward; retrying pull --rebase and push once`);
54231
54921
  try {
54922
+ throwIfAborted(options?.signal, taskId);
54232
54923
  await pullWithRebaseAndResolveConflicts(store, rootDir, taskId, settings, remote, branch, options);
54924
+ throwIfAborted(options?.signal, taskId);
54233
54925
  await execAsync2(pushCommand, {
54234
54926
  cwd: rootDir,
54235
54927
  timeout: PUSH_TIMEOUT_MS,
@@ -54239,6 +54931,7 @@ async function pushToRemoteAfterMerge(store, rootDir, taskId, settings, options)
54239
54931
  mergerLog.log(`${taskId}: push succeeded after non-fast-forward retry`);
54240
54932
  return { pushed: true };
54241
54933
  } catch (retryError) {
54934
+ rethrowIfMergeAborted(retryError);
54242
54935
  const retryMessage = getCommandErrorMessage(retryError);
54243
54936
  mergerLog.error(`${taskId}: push retry failed: ${retryMessage}`);
54244
54937
  return { pushed: false, error: retryMessage };
@@ -54264,6 +54957,7 @@ async function removePostMergeWorktree(rootDir, postMergeWorktree, taskId) {
54264
54957
  }
54265
54958
  }
54266
54959
  async function aiMergeTask(store, rootDir, taskId, options = {}) {
54960
+ throwIfAborted(options.signal, taskId);
54267
54961
  const task = await store.getTask(taskId);
54268
54962
  const mergeBlocker = getTaskMergeBlocker(task);
54269
54963
  if (mergeBlocker) {
@@ -54322,6 +55016,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
54322
55016
  return result;
54323
55017
  }
54324
55018
  try {
55019
+ throwIfAborted(options.signal, taskId);
54325
55020
  const currentBranch = execSync("git symbolic-ref --short HEAD", {
54326
55021
  cwd: rootDir,
54327
55022
  encoding: "utf-8",
@@ -54339,11 +55034,14 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
54339
55034
  });
54340
55035
  await audit.git({ type: "branch:checkout", target: mainBranch });
54341
55036
  }
54342
- } catch {
55037
+ } catch (error) {
55038
+ rethrowIfMergeAborted(error);
54343
55039
  try {
55040
+ throwIfAborted(options.signal, taskId);
54344
55041
  await execAsync2("git checkout main", { cwd: rootDir });
54345
55042
  await audit.git({ type: "branch:checkout", target: "main" });
54346
- } catch {
55043
+ } catch (fallbackError) {
55044
+ rethrowIfMergeAborted(fallbackError);
54347
55045
  mergerLog.warn(`${taskId}: unable to verify/checkout main branch \u2014 proceeding on current HEAD`);
54348
55046
  }
54349
55047
  }
@@ -54383,6 +55081,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
54383
55081
  if (!remote) {
54384
55082
  mergerLog.log(`${taskId}: no remote resolvable \u2014 skipping pre-merge rebase`);
54385
55083
  } else {
55084
+ throwIfAborted(options.signal, taskId);
54386
55085
  mergerLog.log(`${taskId}: fetching ${remote} before merge`);
54387
55086
  await execAsync2(`git fetch "${remote}"`, { cwd: rootDir });
54388
55087
  try {
@@ -54393,12 +55092,14 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
54393
55092
  const mainBranch = mainBranchOut.trim();
54394
55093
  const remoteRef = `${remote}/${mainBranch}`;
54395
55094
  if (worktreePath) {
55095
+ throwIfAborted(options.signal, taskId);
54396
55096
  await execAsync2(`git rebase "${remoteRef}"`, { cwd: worktreePath });
54397
55097
  mergerLog.log(`${taskId}: rebased ${branch} onto ${remoteRef}`);
54398
55098
  } else {
54399
55099
  mergerLog.warn(`${taskId}: no worktreePath \u2014 skipping task branch rebase`);
54400
55100
  }
54401
55101
  } catch (rebaseErr) {
55102
+ rethrowIfMergeAborted(rebaseErr);
54402
55103
  const msg = rebaseErr instanceof Error ? rebaseErr.message : String(rebaseErr);
54403
55104
  mergerLog.warn(`${taskId}: pre-merge rebase failed (${msg}) \u2014 aborting rebase and falling through to smart/AI merge`);
54404
55105
  if (worktreePath) {
@@ -54411,6 +55112,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
54411
55112
  }
54412
55113
  }
54413
55114
  } catch (err) {
55115
+ rethrowIfMergeAborted(err);
54414
55116
  const msg = err instanceof Error ? err.message : String(err);
54415
55117
  mergerLog.warn(`${taskId}: pre-merge rebase pipeline failed (${msg}) \u2014 proceeding without rebase`);
54416
55118
  }
@@ -54513,6 +55215,13 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
54513
55215
  }
54514
55216
  return false;
54515
55217
  } catch (error) {
55218
+ if (error instanceof Error && error.name === "MergeAbortedError") {
55219
+ try {
55220
+ execSync("git reset --merge", { cwd: rootDir, stdio: "pipe" });
55221
+ } catch {
55222
+ }
55223
+ throw error;
55224
+ }
54516
55225
  if (error.name === "VerificationError") {
54517
55226
  const verificationErr = error;
54518
55227
  const maxFixRetries = Math.min(settings.verificationFixRetries ?? 3, 3);
@@ -54527,6 +55236,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
54527
55236
  const fixAttemptStartedAt = Date.now();
54528
55237
  mergerLog.log(`${taskId}: in-merge verification fix attempt ${fixAttempt}/${maxFixRetries}`);
54529
55238
  await store.logEntry(taskId, `In-merge verification fix attempt ${fixAttempt}/${maxFixRetries}`);
55239
+ throwIfAborted(options.signal, taskId);
54530
55240
  fixSuccess = await attemptInMergeVerificationFix(
54531
55241
  store,
54532
55242
  rootDir,
@@ -54575,6 +55285,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
54575
55285
  const fixAttemptStartedAt = Date.now();
54576
55286
  mergerLog.log(`${taskId}: in-merge verification fix attempt ${fixAttempt}/${maxFixRetries}`);
54577
55287
  await store.logEntry(taskId, `In-merge verification fix attempt ${fixAttempt}/${maxFixRetries}`);
55288
+ throwIfAborted(options.signal, taskId);
54578
55289
  fixSuccess = await attemptInMergeVerificationFix(
54579
55290
  store,
54580
55291
  rootDir,
@@ -54725,6 +55436,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
54725
55436
  mergerLog.warn(`${taskId}: failed to clear stale baseBranch references: ${msg}`);
54726
55437
  }
54727
55438
  }
55439
+ throwIfAborted(options.signal, taskId);
54728
55440
  const hasPostMergeSteps = await hasEnabledPostMergeWorkflowSteps(store, taskId, task.enabledWorkflowSteps);
54729
55441
  if (hasPostMergeSteps) {
54730
55442
  const postMergeWorktree = await createPostMergeWorktree(rootDir, taskId);
@@ -54737,6 +55449,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
54737
55449
  try {
54738
55450
  await runPostMergeWorkflowSteps(store, taskId, rootDir, postMergeCwd, settings, options);
54739
55451
  } catch (err) {
55452
+ rethrowIfMergeAborted(err);
54740
55453
  mergerLog.error(`${taskId}: post-merge workflow steps error: ${err.message}`);
54741
55454
  } finally {
54742
55455
  if (postMergeWorktree) {
@@ -54744,6 +55457,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
54744
55457
  }
54745
55458
  }
54746
55459
  }
55460
+ throwIfAborted(options.signal, taskId);
54747
55461
  if (worktreePath && existsSync21(worktreePath)) {
54748
55462
  const otherUser = await findWorktreeUser(store, worktreePath, taskId);
54749
55463
  if (otherUser) {
@@ -54765,6 +55479,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
54765
55479
  }
54766
55480
  if (settings.pushAfterMerge && settings.mergeStrategy !== "pull-request") {
54767
55481
  try {
55482
+ throwIfAborted(options.signal, taskId);
54768
55483
  const pushResult = await pushToRemoteAfterMerge(store, rootDir, taskId, settings, options);
54769
55484
  if (pushResult.pushed) {
54770
55485
  mergerLog.log(`${taskId}: pushed merged result to remote`);
@@ -54845,7 +55560,9 @@ async function executeMergeAttempt(params, aiTracker) {
54845
55560
  await execAsync2(`git merge --squash "${branch}"`, {
54846
55561
  cwd: rootDir
54847
55562
  });
54848
- } catch {
55563
+ throwIfAborted(options.signal, taskId);
55564
+ } catch (error) {
55565
+ rethrowIfMergeAborted(error);
54849
55566
  mergeExitedWithConflicts = true;
54850
55567
  }
54851
55568
  const conflictedFiles = await getConflictedFiles(rootDir);
@@ -54887,6 +55604,7 @@ async function executeMergeAttempt(params, aiTracker) {
54887
55604
  encoding: "utf-8"
54888
55605
  }).trim();
54889
55606
  if (staged !== "0") {
55607
+ throwIfAborted(options.signal, taskId);
54890
55608
  const escapedLog = commitLog.replace(/"/g, '\\"');
54891
55609
  const fallbackPrefix = includeTaskId ? `feat(${taskId})` : "feat";
54892
55610
  const authorArg = getCommitAuthorArg(settings);
@@ -54897,7 +55615,17 @@ async function executeMergeAttempt(params, aiTracker) {
54897
55615
  mergerLog.log(`${taskId}: committed after auto-resolving all conflicts`);
54898
55616
  }
54899
55617
  if (testCommand || buildCommand) {
54900
- await runDeterministicVerification(store, rootDir, taskId, testCommand, buildCommand, testSource, buildSource);
55618
+ throwIfAborted(options.signal, taskId);
55619
+ await runDeterministicVerification(
55620
+ store,
55621
+ rootDir,
55622
+ taskId,
55623
+ testCommand,
55624
+ buildCommand,
55625
+ testSource,
55626
+ buildSource,
55627
+ options.signal
55628
+ );
54901
55629
  }
54902
55630
  return true;
54903
55631
  }
@@ -54910,7 +55638,17 @@ async function executeMergeAttempt(params, aiTracker) {
54910
55638
  if (squashIsEmpty) {
54911
55639
  mergerLog.log(`${taskId}: squash merge staged nothing \u2014 already merged`);
54912
55640
  if (testCommand || buildCommand) {
54913
- await runDeterministicVerification(store, rootDir, taskId, testCommand, buildCommand, testSource, buildSource);
55641
+ throwIfAborted(options.signal, taskId);
55642
+ await runDeterministicVerification(
55643
+ store,
55644
+ rootDir,
55645
+ taskId,
55646
+ testCommand,
55647
+ buildCommand,
55648
+ testSource,
55649
+ buildSource,
55650
+ options.signal
55651
+ );
54914
55652
  }
54915
55653
  return true;
54916
55654
  }
@@ -54919,6 +55657,7 @@ async function executeMergeAttempt(params, aiTracker) {
54919
55657
  await execAsync2(`git merge --squash "${branch}"`, {
54920
55658
  cwd: rootDir
54921
55659
  });
55660
+ throwIfAborted(options.signal, taskId);
54922
55661
  const squashIsEmpty = execSync(
54923
55662
  "git diff --cached --quiet 2>&1; echo $?",
54924
55663
  { cwd: rootDir, encoding: "utf-8" }
@@ -54926,7 +55665,17 @@ async function executeMergeAttempt(params, aiTracker) {
54926
55665
  if (squashIsEmpty) {
54927
55666
  mergerLog.log(`${taskId}: squash merge staged nothing \u2014 already merged`);
54928
55667
  if (testCommand || buildCommand) {
54929
- await runDeterministicVerification(store, rootDir, taskId, testCommand, buildCommand, testSource, buildSource);
55668
+ throwIfAborted(options.signal, taskId);
55669
+ await runDeterministicVerification(
55670
+ store,
55671
+ rootDir,
55672
+ taskId,
55673
+ testCommand,
55674
+ buildCommand,
55675
+ testSource,
55676
+ buildSource,
55677
+ options.signal
55678
+ );
54930
55679
  }
54931
55680
  return true;
54932
55681
  }
@@ -54943,11 +55692,13 @@ async function executeMergeAttempt(params, aiTracker) {
54943
55692
  }
54944
55693
  }
54945
55694
  if (buildCommand) {
55695
+ throwIfAborted(options.signal, taskId);
54946
55696
  const stagedFiles = await getStagedFiles(rootDir);
54947
55697
  if (shouldSyncDependenciesForMerge(stagedFiles, hasInstallState(rootDir))) {
54948
- await syncDependenciesForMerge(store, rootDir, taskId);
55698
+ await syncDependenciesForMerge(store, rootDir, taskId, options.signal);
54949
55699
  }
54950
55700
  }
55701
+ throwIfAborted(options.signal, taskId);
54951
55702
  aiTracker.aiWasInvoked = true;
54952
55703
  const agentResult = await runAiAgentForCommit({
54953
55704
  store,
@@ -54977,10 +55728,27 @@ async function executeMergeAttempt(params, aiTracker) {
54977
55728
  throw new Error(`Build verification failed for ${taskId}: ${errorMessage}`);
54978
55729
  }
54979
55730
  if (testCommand || buildCommand) {
54980
- await runDeterministicVerification(store, rootDir, taskId, testCommand, buildCommand, testSource, buildSource);
55731
+ throwIfAborted(options.signal, taskId);
55732
+ await runDeterministicVerification(
55733
+ store,
55734
+ rootDir,
55735
+ taskId,
55736
+ testCommand,
55737
+ buildCommand,
55738
+ testSource,
55739
+ buildSource,
55740
+ options.signal
55741
+ );
54981
55742
  }
54982
55743
  return true;
54983
55744
  } catch (error) {
55745
+ if (error instanceof Error && error.name === "MergeAbortedError") {
55746
+ try {
55747
+ execSync("git reset --merge", { cwd: rootDir, stdio: "pipe" });
55748
+ } catch {
55749
+ }
55750
+ throw error;
55751
+ }
54984
55752
  if (error.message?.includes("Build verification failed")) {
54985
55753
  throw error;
54986
55754
  }
@@ -54997,6 +55765,7 @@ async function attemptWithTheirsStrategy(params) {
54997
55765
  const { rootDir, branch, commitLog, includeTaskId, taskId, store, settings, testCommand, buildCommand, testSource, buildSource } = params;
54998
55766
  mergerLog.log(`${taskId}: attempting merge with -X theirs strategy`);
54999
55767
  try {
55768
+ throwIfAborted(params.options.signal, taskId);
55000
55769
  await execAsync2(`git merge -X theirs --squash "${branch}"`, {
55001
55770
  cwd: rootDir
55002
55771
  });
@@ -55014,10 +55783,21 @@ async function attemptWithTheirsStrategy(params) {
55014
55783
  }).trim();
55015
55784
  if (staged === "0") {
55016
55785
  if (testCommand || buildCommand) {
55017
- await runDeterministicVerification(store, rootDir, taskId, testCommand, buildCommand, testSource, buildSource);
55786
+ throwIfAborted(params.options.signal, taskId);
55787
+ await runDeterministicVerification(
55788
+ store,
55789
+ rootDir,
55790
+ taskId,
55791
+ testCommand,
55792
+ buildCommand,
55793
+ testSource,
55794
+ buildSource,
55795
+ params.options.signal
55796
+ );
55018
55797
  }
55019
55798
  return true;
55020
55799
  }
55800
+ throwIfAborted(params.options.signal, taskId);
55021
55801
  const escapedLog = commitLog.replace(/"/g, '\\"');
55022
55802
  const fallbackPrefix = includeTaskId ? `feat(${taskId})` : "feat";
55023
55803
  const authorArg = getCommitAuthorArg(settings);
@@ -55027,10 +55807,23 @@ async function attemptWithTheirsStrategy(params) {
55027
55807
  );
55028
55808
  mergerLog.log(`${taskId}: committed with -X theirs auto-resolution`);
55029
55809
  if (testCommand || buildCommand) {
55030
- await runDeterministicVerification(store, rootDir, taskId, testCommand, buildCommand, testSource, buildSource);
55810
+ throwIfAborted(params.options.signal, taskId);
55811
+ await runDeterministicVerification(
55812
+ store,
55813
+ rootDir,
55814
+ taskId,
55815
+ testCommand,
55816
+ buildCommand,
55817
+ testSource,
55818
+ buildSource,
55819
+ params.options.signal
55820
+ );
55031
55821
  }
55032
55822
  return true;
55033
55823
  } catch (error) {
55824
+ if (error instanceof Error && error.name === "MergeAbortedError") {
55825
+ throw error;
55826
+ }
55034
55827
  mergerLog.error(`${taskId}: -X theirs merge failed: ${error}`);
55035
55828
  return false;
55036
55829
  }
@@ -55096,6 +55889,7 @@ async function runAiAgentForCommit(params) {
55096
55889
  buildMergeSystemPrompt(includeTaskId, settings.agentPrompts, authorArg),
55097
55890
  mergerInstructions
55098
55891
  );
55892
+ throwIfAborted(options.signal, taskId);
55099
55893
  let skillContext = void 0;
55100
55894
  if (options.agentStore) {
55101
55895
  try {
@@ -55142,13 +55936,15 @@ async function runAiAgentForCommit(params) {
55142
55936
  mergerLog.log(`${taskId}: starting fresh merge agent session`);
55143
55937
  try {
55144
55938
  await withRateLimitRetry(async () => {
55939
+ throwIfAborted(options.signal, taskId);
55145
55940
  await promptWithFallback(session, prompt);
55146
55941
  checkSessionError(session);
55147
55942
  }, {
55148
55943
  onRetry: (attempt, delayMs, error) => {
55149
55944
  const delaySec = Math.round(delayMs / 1e3);
55150
55945
  mergerLog.warn(`\u23F3 ${taskId} rate limited \u2014 retry ${attempt} in ${delaySec}s: ${error.message}`);
55151
- }
55946
+ },
55947
+ signal: options.signal
55152
55948
  });
55153
55949
  } catch (err) {
55154
55950
  const errorMessage = err instanceof Error ? err.message : String(err);
@@ -55171,13 +55967,15 @@ async function runAiAgentForCommit(params) {
55171
55967
  });
55172
55968
  try {
55173
55969
  await withRateLimitRetry(async () => {
55970
+ throwIfAborted(options.signal, taskId);
55174
55971
  await promptWithFallback(session, truncatedPrompt);
55175
55972
  checkSessionError(session);
55176
55973
  }, {
55177
55974
  onRetry: (attempt, delayMs, error) => {
55178
55975
  const delaySec = Math.round(delayMs / 1e3);
55179
55976
  mergerLog.warn(`\u23F3 ${taskId} rate limited during truncated retry \u2014 retry ${attempt} in ${delaySec}s: ${error.message}`);
55180
- }
55977
+ },
55978
+ signal: options.signal
55181
55979
  });
55182
55980
  } catch (retryErr) {
55183
55981
  const retryErrorMessage = retryErr instanceof Error ? retryErr.message : String(retryErr);
@@ -55201,6 +55999,7 @@ async function runAiAgentForCommit(params) {
55201
55999
  }).trim();
55202
56000
  if (staged !== "0") {
55203
56001
  if (!buildCommand) {
56002
+ throwIfAborted(options.signal, taskId);
55204
56003
  mergerLog.log("Agent didn't commit \u2014 committing with fallback message");
55205
56004
  const escapedLog = commitLog.replace(/"/g, '\\"');
55206
56005
  const fallbackPrefix = includeTaskId ? `feat(${taskId})` : "feat";
@@ -55305,6 +56104,7 @@ async function hasEnabledPostMergeWorkflowSteps(store, taskId, enabledWorkflowSt
55305
56104
  return false;
55306
56105
  }
55307
56106
  async function runPostMergeWorkflowSteps(store, taskId, rootDir, cwd, settings, mergeOptions = {}) {
56107
+ throwIfAborted(mergeOptions.signal, taskId);
55308
56108
  const task = await store.getTask(taskId);
55309
56109
  if (!task.enabledWorkflowSteps?.length) return;
55310
56110
  const existingResults = task.workflowStepResults || [];
@@ -55514,7 +56314,7 @@ async function completeTask(store, taskId, result) {
55514
56314
  result.task = task;
55515
56315
  store.emit("task:merged", result);
55516
56316
  }
55517
- var execAsync2, LOCKFILE_PATTERNS, GENERATED_PATTERNS, DEPENDENCY_SYNC_TRIGGER_PATTERNS, VERIFICATION_COMMAND_MAX_BUFFER, VERIFICATION_LOG_MAX_CHARS, WORKFLOW_SCRIPT_OUTPUT_MAX_CHARS, PULL_REBASE_TIMEOUT_MS, PUSH_TIMEOUT_MS, MERGE_COMMIT_LOG_MAX_CHARS, MERGE_DIFF_STAT_MAX_CHARS, VerificationError;
56317
+ var execAsync2, LOCKFILE_PATTERNS, GENERATED_PATTERNS, DEPENDENCY_SYNC_TRIGGER_PATTERNS, VERIFICATION_COMMAND_MAX_BUFFER, VERIFICATION_LOG_MAX_CHARS, WORKFLOW_SCRIPT_OUTPUT_MAX_CHARS, PULL_REBASE_TIMEOUT_MS, PUSH_TIMEOUT_MS, MERGE_COMMIT_LOG_MAX_CHARS, MERGE_DIFF_STAT_MAX_CHARS, VerificationError, MergeAbortedError;
55518
56318
  var init_merger = __esm({
55519
56319
  "../engine/src/merger.ts"() {
55520
56320
  "use strict";
@@ -55581,6 +56381,12 @@ var init_merger = __esm({
55581
56381
  this.name = "VerificationError";
55582
56382
  }
55583
56383
  };
56384
+ MergeAbortedError = class extends Error {
56385
+ constructor(message) {
56386
+ super(message);
56387
+ this.name = "MergeAbortedError";
56388
+ }
56389
+ };
55584
56390
  }
55585
56391
  });
55586
56392
 
@@ -56483,12 +57289,33 @@ var init_step_session_executor = __esm({
56483
57289
  * After calling this method, any in-progress or future `executeStep()` calls
56484
57290
  * will return a failed result immediately.
56485
57291
  */
57292
+ /**
57293
+ * Abort in-flight bash on every active step session without disposing the
57294
+ * sessions. Used during runtime shutdown so detached bash subprocess trees
57295
+ * (including vitest workers) are killed via pi-coding-agent's
57296
+ * killProcessTree. Sessions remain alive so near-complete steps can still
57297
+ * finish during the runtime's graceful drain window.
57298
+ */
57299
+ abortAllSessionBash() {
57300
+ for (const [stepIdx, handle] of this.activeSessions) {
57301
+ try {
57302
+ handle.abortBash();
57303
+ } catch (err) {
57304
+ stepExecLog.warn(`Failed to abort bash for step ${stepIdx}: ${err}`);
57305
+ }
57306
+ }
57307
+ }
56486
57308
  async terminateAllSessions() {
56487
57309
  this.aborted = true;
56488
57310
  stepExecLog.log(
56489
57311
  `Terminating ${this.activeSessions.size} active session(s) for task ${this.options.taskDetail.id}`
56490
57312
  );
56491
57313
  for (const [stepIdx, handle] of this.activeSessions) {
57314
+ try {
57315
+ handle.abortBash();
57316
+ } catch (err) {
57317
+ stepExecLog.warn(`Failed to abort bash for step ${stepIdx}: ${err}`);
57318
+ }
56492
57319
  try {
56493
57320
  handle.dispose();
56494
57321
  } catch (err) {
@@ -56638,7 +57465,10 @@ var init_step_session_executor = __esm({
56638
57465
  ...this.options.skillSelection ? { skillSelection: this.options.skillSelection } : {}
56639
57466
  });
56640
57467
  session = createResult.session;
56641
- const handle = { dispose: () => session?.dispose() };
57468
+ const handle = {
57469
+ dispose: () => session?.dispose(),
57470
+ abortBash: () => session?.abortBash()
57471
+ };
56642
57472
  this.activeSessions.set(stepIndex, handle);
56643
57473
  stuckTaskDetector?.trackTask(trackingKey, { dispose: () => session?.dispose() }, taskDetail.id);
56644
57474
  stepExecLog.log(
@@ -57697,6 +58527,40 @@ Lint, tests, and typecheck are also hard quality gates:
57697
58527
  getExecutingTaskIds() {
57698
58528
  return /* @__PURE__ */ new Set([...this.executing, ...this.recoveringCompleted]);
57699
58529
  }
58530
+ /**
58531
+ * Abort the in-flight bash subprocess (if any) on every active agent session.
58532
+ *
58533
+ * Invoked at runtime shutdown so detached subprocess trees spawned by agent
58534
+ * bash tools — including grandchildren like vitest workers — are killed via
58535
+ * pi-coding-agent's killProcessTree. Without this, when the worker is killed
58536
+ * those process groups are orphaned because they're detached.
58537
+ *
58538
+ * Sessions are not disposed here so any near-complete agent loop still has a
58539
+ * chance to wrap up during the runtime's graceful drain window.
58540
+ */
58541
+ abortAllSessionBash() {
58542
+ for (const [taskId, { session }] of this.activeSessions) {
58543
+ try {
58544
+ session.abortBash();
58545
+ } catch (err) {
58546
+ executorLog.warn(`abortAllSessionBash: failed for task ${taskId}: ${err}`);
58547
+ }
58548
+ }
58549
+ for (const [agentId, session] of this.childSessions) {
58550
+ try {
58551
+ session.abortBash();
58552
+ } catch (err) {
58553
+ executorLog.warn(`abortAllSessionBash: failed for child agent ${agentId}: ${err}`);
58554
+ }
58555
+ }
58556
+ for (const [taskId, stepExecutor] of this.activeStepExecutors) {
58557
+ try {
58558
+ stepExecutor.abortAllSessionBash();
58559
+ } catch (err) {
58560
+ executorLog.warn(`abortAllSessionBash: failed for step executor ${taskId}: ${err}`);
58561
+ }
58562
+ }
58563
+ }
57700
58564
  /**
57701
58565
  * Check whether a task's work is complete — all steps are done or skipped.
57702
58566
  * Used to detect tasks that called fn_task_done() but never transitioned to in-review
@@ -60906,6 +61770,32 @@ function pathsOverlap2(a, b) {
60906
61770
  }
60907
61771
  return false;
60908
61772
  }
61773
+ function normalizeOverlapPath(path) {
61774
+ return path.trim().replaceAll("\\", "/").replace(/^\.\//, "");
61775
+ }
61776
+ function isIgnoredOverlapPath(path, ignorePath) {
61777
+ const normalizedPath = normalizeOverlapPath(path);
61778
+ const normalizedIgnore = normalizeOverlapPath(ignorePath);
61779
+ if (normalizedIgnore.endsWith("/*")) {
61780
+ const directory = normalizedIgnore.slice(0, -2);
61781
+ return normalizedPath === directory || normalizedPath.startsWith(`${directory}/`);
61782
+ }
61783
+ if (normalizedIgnore.endsWith("/")) {
61784
+ const directory = normalizedIgnore.slice(0, -1);
61785
+ return normalizedPath === directory || normalizedPath.startsWith(normalizedIgnore);
61786
+ }
61787
+ return normalizedPath === normalizedIgnore || normalizedPath.startsWith(`${normalizedIgnore}/`);
61788
+ }
61789
+ function filterPathsByIgnoreList(paths, ignorePaths) {
61790
+ if (!ignorePaths || ignorePaths.length === 0) {
61791
+ return paths;
61792
+ }
61793
+ const normalizedIgnorePaths = ignorePaths.map(normalizeOverlapPath).filter(Boolean);
61794
+ if (normalizedIgnorePaths.length === 0) {
61795
+ return paths;
61796
+ }
61797
+ return paths.filter((path) => !normalizedIgnorePaths.some((ignore) => isIgnoredOverlapPath(path, ignore)));
61798
+ }
60909
61799
  var Scheduler;
60910
61800
  var init_scheduler = __esm({
60911
61801
  "../engine/src/scheduler.ts"() {
@@ -61253,16 +62143,19 @@ var init_scheduler = __esm({
61253
62143
  todo = sortTasksByPriorityThenAgeAndId(todo);
61254
62144
  const activeScopes = /* @__PURE__ */ new Map();
61255
62145
  if (settings.groupOverlappingFiles) {
62146
+ const overlapIgnorePaths = settings.overlapIgnorePaths ?? [];
61256
62147
  for (const t of inProgress) {
61257
62148
  const scope = await this.store.parseFileScopeFromPrompt(t.id);
61258
- if (scope.length > 0) activeScopes.set(t.id, scope);
62149
+ const filteredScope = filterPathsByIgnoreList(scope, overlapIgnorePaths);
62150
+ if (filteredScope.length > 0) activeScopes.set(t.id, filteredScope);
61259
62151
  }
61260
62152
  const inReviewWithWorktree = tasks.filter(
61261
62153
  (t) => t.column === "in-review" && t.worktree
61262
62154
  );
61263
62155
  for (const t of inReviewWithWorktree) {
61264
62156
  const scope = await this.store.parseFileScopeFromPrompt(t.id);
61265
- if (scope.length > 0) activeScopes.set(t.id, scope);
62157
+ const filteredScope = filterPathsByIgnoreList(scope, overlapIgnorePaths);
62158
+ if (filteredScope.length > 0) activeScopes.set(t.id, filteredScope);
61266
62159
  }
61267
62160
  }
61268
62161
  const ordered = resolveDependencyOrder(todo);
@@ -61298,7 +62191,11 @@ var init_scheduler = __esm({
61298
62191
  continue;
61299
62192
  }
61300
62193
  if (settings.groupOverlappingFiles) {
61301
- const taskScope = await this.store.parseFileScopeFromPrompt(task.id);
62194
+ const overlapIgnorePaths = settings.overlapIgnorePaths ?? [];
62195
+ const taskScope = filterPathsByIgnoreList(
62196
+ await this.store.parseFileScopeFromPrompt(task.id),
62197
+ overlapIgnorePaths
62198
+ );
61302
62199
  if (taskScope.length > 0) {
61303
62200
  let overlappingTaskId = null;
61304
62201
  for (const [ipId, ipScope] of activeScopes) {
@@ -61342,7 +62239,10 @@ var init_scheduler = __esm({
61342
62239
  this.options.onSchedule?.(task);
61343
62240
  started++;
61344
62241
  if (settings.groupOverlappingFiles) {
61345
- const scope = await this.store.parseFileScopeFromPrompt(task.id);
62242
+ const scope = filterPathsByIgnoreList(
62243
+ await this.store.parseFileScopeFromPrompt(task.id),
62244
+ settings.overlapIgnorePaths
62245
+ );
61346
62246
  if (scope.length > 0) activeScopes.set(task.id, scope);
61347
62247
  }
61348
62248
  }
@@ -62373,7 +63273,7 @@ var init_mission_autopilot = __esm({
62373
63273
  });
62374
63274
 
62375
63275
  // ../engine/src/mission-execution-loop.ts
62376
- import { EventEmitter as EventEmitter15 } from "node:events";
63276
+ import { EventEmitter as EventEmitter16 } from "node:events";
62377
63277
  var loopLog, VALIDATION_TIMEOUT_MS, MissionExecutionLoop;
62378
63278
  var init_mission_execution_loop = __esm({
62379
63279
  "../engine/src/mission-execution-loop.ts"() {
@@ -62383,7 +63283,7 @@ var init_mission_execution_loop = __esm({
62383
63283
  init_logger2();
62384
63284
  loopLog = createLogger2("mission-loop");
62385
63285
  VALIDATION_TIMEOUT_MS = 10 * 60 * 1e3;
62386
- MissionExecutionLoop = class extends EventEmitter15 {
63286
+ MissionExecutionLoop = class extends EventEmitter16 {
62387
63287
  running = false;
62388
63288
  taskStore;
62389
63289
  missionStore;
@@ -68726,7 +69626,7 @@ var init_plugin_runner = __esm({
68726
69626
  });
68727
69627
 
68728
69628
  // ../engine/src/runtimes/in-process-runtime.ts
68729
- import { EventEmitter as EventEmitter16 } from "node:events";
69629
+ import { EventEmitter as EventEmitter17 } from "node:events";
68730
69630
  var InProcessRuntime;
68731
69631
  var init_in_process_runtime = __esm({
68732
69632
  "../engine/src/runtimes/in-process-runtime.ts"() {
@@ -68747,7 +69647,7 @@ var init_in_process_runtime = __esm({
68747
69647
  init_mission_autopilot();
68748
69648
  init_mission_execution_loop();
68749
69649
  init_triage();
68750
- InProcessRuntime = class extends EventEmitter16 {
69650
+ InProcessRuntime = class extends EventEmitter17 {
68751
69651
  /**
68752
69652
  * @param config - Runtime configuration
68753
69653
  * @param centralCore - CentralCore reference for global coordination
@@ -69088,9 +69988,11 @@ var init_in_process_runtime = __esm({
69088
69988
  );
69089
69989
  this.triggerScheduler.start();
69090
69990
  const isHeartbeatEnabledAgent = (agent) => !isEphemeralAgent(agent) && agent.runtimeConfig?.enabled !== false;
69991
+ const isTickableHeartbeatState = (state) => state === "active" || state === "running" || state === "idle";
69992
+ const isTimerManagedAgent = (agent) => isHeartbeatEnabledAgent(agent) && isTickableHeartbeatState(agent.state);
69091
69993
  this.agentCreatedListener = (agent) => {
69092
69994
  if (!this.triggerScheduler) return;
69093
- if (!isHeartbeatEnabledAgent(agent)) return;
69995
+ if (!isTimerManagedAgent(agent)) return;
69094
69996
  const rc = agent.runtimeConfig;
69095
69997
  this.triggerScheduler.registerAgent(agent.id, {
69096
69998
  heartbeatIntervalMs: rc?.heartbeatIntervalMs,
@@ -69101,7 +70003,7 @@ var init_in_process_runtime = __esm({
69101
70003
  this.agentStore.on("agent:created", this.agentCreatedListener);
69102
70004
  this.agentUpdatedListener = (agent) => {
69103
70005
  if (!this.triggerScheduler) return;
69104
- if (!isHeartbeatEnabledAgent(agent)) {
70006
+ if (!isTimerManagedAgent(agent)) {
69105
70007
  this.triggerScheduler.unregisterAgent(agent.id);
69106
70008
  runtimeLog.log(`Unregistered agent ${agent.id} from heartbeat triggers`);
69107
70009
  return;
@@ -69130,6 +70032,9 @@ var init_in_process_runtime = __esm({
69130
70032
  try {
69131
70033
  await this.agentStore?.deleteAgent(agentId);
69132
70034
  } catch (err) {
70035
+ if (this.isBenignEphemeralDeleteRaceError(agentId, err)) {
70036
+ return;
70037
+ }
69133
70038
  const msg = err instanceof Error ? err.message : String(err);
69134
70039
  runtimeLog.warn(`Failed to delete ephemeral agent ${agentId} after termination: ${msg}`);
69135
70040
  }
@@ -69146,7 +70051,7 @@ var init_in_process_runtime = __esm({
69146
70051
  const agents = await this.agentStore.listAgents();
69147
70052
  let registeredCount = 0;
69148
70053
  for (const agent of agents) {
69149
- if (!isHeartbeatEnabledAgent(agent)) continue;
70054
+ if (!isTimerManagedAgent(agent)) continue;
69150
70055
  const rc = agent.runtimeConfig;
69151
70056
  this.triggerScheduler.registerAgent(agent.id, {
69152
70057
  heartbeatIntervalMs: rc?.heartbeatIntervalMs,
@@ -69348,6 +70253,14 @@ var init_in_process_runtime = __esm({
69348
70253
  this.missionExecutionLoop.stop();
69349
70254
  runtimeLog.log("MissionExecutionLoop stopped");
69350
70255
  }
70256
+ if (this.executor) {
70257
+ try {
70258
+ this.executor.abortAllSessionBash();
70259
+ runtimeLog.log("Aborted in-flight bash subprocesses on active sessions");
70260
+ } catch (err) {
70261
+ runtimeLog.warn(`Failed to abort in-flight bash subprocesses: ${err}`);
70262
+ }
70263
+ }
69351
70264
  const shutdownTimeout = 3e4;
69352
70265
  const startTime = Date.now();
69353
70266
  while (Date.now() - startTime < shutdownTimeout) {
@@ -69553,6 +70466,21 @@ var init_in_process_runtime = __esm({
69553
70466
  });
69554
70467
  runtimeLog.log("Event forwarding setup complete");
69555
70468
  }
70469
+ /**
70470
+ * Returns true when an ephemeral delete failure is expected due to cleanup races
70471
+ * (for example the agent was already removed by a parallel cleanup path).
70472
+ */
70473
+ isBenignEphemeralDeleteRaceError(agentId, err) {
70474
+ const msg = err instanceof Error ? err.message : String(err);
70475
+ const normalized = msg.toLowerCase();
70476
+ if (normalized.includes("already deleted") || normalized.includes("already removed")) {
70477
+ return true;
70478
+ }
70479
+ if (normalized.includes(`agent ${agentId.toLowerCase()} not found`)) {
70480
+ return true;
70481
+ }
70482
+ return /^agent\s+.+\s+not found$/i.test(msg.trim());
70483
+ }
69556
70484
  /**
69557
70485
  * Update status and emit health-changed event.
69558
70486
  */
@@ -69683,6 +70611,7 @@ var init_project_engine = __esm({
69683
70611
  mergeActive = /* @__PURE__ */ new Set();
69684
70612
  mergeRunning = false;
69685
70613
  activeMergeSession = null;
70614
+ mergeAbortController = null;
69686
70615
  mergeRetryTimer = null;
69687
70616
  /**
69688
70617
  * Pending manual merge resolvers — keyed by taskId.
@@ -69742,6 +70671,13 @@ var init_project_engine = __esm({
69742
70671
  }
69743
70672
  } catch {
69744
70673
  }
70674
+ try {
70675
+ const { syncMemoryDreamsAutomation: syncMemoryDreamsAutomation2 } = await Promise.resolve().then(() => (init_src(), src_exports));
70676
+ if (typeof syncMemoryDreamsAutomation2 === "function") {
70677
+ await syncMemoryDreamsAutomation2(this.automationStore, settings);
70678
+ }
70679
+ } catch {
70680
+ }
69745
70681
  this.cronRunner.start();
69746
70682
  runtimeLog.log("CronRunner initialized and started");
69747
70683
  } catch (err) {
@@ -69758,6 +70694,10 @@ var init_project_engine = __esm({
69758
70694
  }
69759
70695
  /**
69760
70696
  * Gracefully stop the engine and all subsystems.
70697
+ *
70698
+ * If a merge is currently running, its abort signal is triggered before the
70699
+ * active merge session is disposed so merge pipeline checkpoints can exit
70700
+ * promptly without continuing git/verification work after shutdown starts.
69761
70701
  */
69762
70702
  async stop() {
69763
70703
  this.shuttingDown = true;
@@ -69765,6 +70705,13 @@ var init_project_engine = __esm({
69765
70705
  clearTimeout(this.mergeRetryTimer);
69766
70706
  this.mergeRetryTimer = null;
69767
70707
  }
70708
+ this.mergeAbortController?.abort();
70709
+ this.mergeAbortController = null;
70710
+ const queuedTaskIds = [...this.mergeQueue];
70711
+ this.mergeQueue.length = 0;
70712
+ for (const queuedTaskId of queuedTaskIds) {
70713
+ this.mergeActive.delete(queuedTaskId);
70714
+ }
69768
70715
  if (this.activeMergeSession) {
69769
70716
  this.activeMergeSession.dispose();
69770
70717
  this.activeMergeSession = null;
@@ -69898,6 +70845,7 @@ var init_project_engine = __esm({
69898
70845
  return (task.mergeRetries ?? 0) < _ProjectEngine.MAX_AUTO_MERGE_RETRIES || this.hasAutoHealableVerificationBufferFailure(task) || this.isRetryCooldownElapsed(task);
69899
70846
  }
69900
70847
  internalEnqueueMerge(taskId) {
70848
+ if (this.shuttingDown) return;
69901
70849
  if (this.mergeActive.has(taskId)) return;
69902
70850
  this.mergeActive.add(taskId);
69903
70851
  this.mergeQueue.push(taskId);
@@ -70006,14 +70954,18 @@ var init_project_engine = __esm({
70006
70954
  const pool = this.runtime.worktreePool;
70007
70955
  const agentStore = this.runtime.agentStore;
70008
70956
  const usageLimitPauser = this.runtime.usageLimitPauser;
70009
- const rawMerge = () => aiMergeTask(store, cwd, taskId, {
70010
- pool,
70011
- usageLimitPauser,
70012
- agentStore,
70013
- onSession: (session) => {
70014
- this.activeMergeSession = session;
70015
- }
70016
- });
70957
+ const rawMerge = () => {
70958
+ this.mergeAbortController = new AbortController();
70959
+ return aiMergeTask(store, cwd, taskId, {
70960
+ pool,
70961
+ usageLimitPauser,
70962
+ agentStore,
70963
+ signal: this.mergeAbortController.signal,
70964
+ onSession: (session) => {
70965
+ this.activeMergeSession = session;
70966
+ }
70967
+ });
70968
+ };
70017
70969
  let result;
70018
70970
  if (semaphore) {
70019
70971
  result = await semaphore.run(rawMerge, PRIORITY_MERGE);
@@ -70034,6 +70986,18 @@ var init_project_engine = __esm({
70034
70986
  } catch (err) {
70035
70987
  this.activeMergeSession = null;
70036
70988
  const errorMsg = err instanceof Error ? err.message : String(err);
70989
+ const mergeWasAborted = err instanceof Error && err.name === "MergeAbortedError";
70990
+ if (mergeWasAborted) {
70991
+ runtimeLog.log(`${manualResolver ? "Manual" : "Auto"}-merge aborted for ${taskId}: ${errorMsg}`);
70992
+ this.mergeAbortController = null;
70993
+ if (manualResolver) {
70994
+ this.manualMergeResolvers.delete(taskId);
70995
+ manualResolver.reject(err instanceof Error ? err : new Error(errorMsg));
70996
+ } else {
70997
+ await store.updateTask(taskId, { status: null }).catch(() => void 0);
70998
+ }
70999
+ continue;
71000
+ }
70037
71001
  runtimeLog.error(`${manualResolver ? "Manual" : "Auto"}-merge failed for ${taskId}: ${errorMsg}`);
70038
71002
  if (manualResolver) {
70039
71003
  this.manualMergeResolvers.delete(taskId);
@@ -70118,6 +71082,7 @@ var init_project_engine = __esm({
70118
71082
  }
70119
71083
  }
70120
71084
  } finally {
71085
+ this.mergeAbortController = null;
70121
71086
  this.mergeActive.delete(taskId);
70122
71087
  const lateResolver = this.manualMergeResolvers.get(taskId);
70123
71088
  if (lateResolver) {
@@ -70650,7 +71615,7 @@ var init_ai_session_diagnostics = __esm({
70650
71615
 
70651
71616
  // ../dashboard/src/planning.ts
70652
71617
  import { randomUUID as randomUUID10 } from "node:crypto";
70653
- import { EventEmitter as EventEmitter17 } from "node:events";
71618
+ import { EventEmitter as EventEmitter18 } from "node:events";
70654
71619
  async function initEngine2() {
70655
71620
  try {
70656
71621
  const engineModule = "@fusion/engine";
@@ -71484,7 +72449,7 @@ For completion:
71484
72449
  process.on("beforeExit", () => {
71485
72450
  clearInterval(cleanupInterval2);
71486
72451
  });
71487
- PlanningStreamManager = class extends EventEmitter17 {
72452
+ PlanningStreamManager = class extends EventEmitter18 {
71488
72453
  constructor(bufferSize = 100) {
71489
72454
  super();
71490
72455
  this.bufferSize = bufferSize;
@@ -73083,7 +74048,7 @@ var init_github = __esm({
73083
74048
  });
73084
74049
 
73085
74050
  // ../dashboard/src/github-poll.ts
73086
- import { EventEmitter as EventEmitter18 } from "node:events";
74051
+ import { EventEmitter as EventEmitter19 } from "node:events";
73087
74052
  function toAlias(type, number) {
73088
74053
  return `${type}_${number}`;
73089
74054
  }
@@ -73129,7 +74094,7 @@ var init_github_poll = __esm({
73129
74094
  }
73130
74095
  };
73131
74096
  githubRateLimiter = new GitHubRateLimiter();
73132
- GitHubPollingService = class extends EventEmitter18 {
74097
+ GitHubPollingService = class extends EventEmitter19 {
73133
74098
  watches = /* @__PURE__ */ new Map();
73134
74099
  rateLimiter;
73135
74100
  pollingIntervalMs;
@@ -73376,7 +74341,7 @@ var init_github_poll = __esm({
73376
74341
  // ../dashboard/src/terminal.ts
73377
74342
  import { spawn as spawn2 } from "node:child_process";
73378
74343
  import { randomUUID as randomUUID11 } from "node:crypto";
73379
- import { EventEmitter as EventEmitter19 } from "node:events";
74344
+ import { EventEmitter as EventEmitter20 } from "node:events";
73380
74345
  function extractBaseCommand(command) {
73381
74346
  let trimmed = command.trim();
73382
74347
  while (/^[A-Za-z_][A-Za-z0-9_]*=/.test(trimmed)) {
@@ -73522,7 +74487,7 @@ var init_terminal = __esm({
73522
74487
  // >(...) — process substitution (bash)
73523
74488
  ];
73524
74489
  CHAIN_OPERATORS = /&&|\|\||;|(?<!\|)\|(?!\|)/;
73525
- TerminalSessionManager = class extends EventEmitter19 {
74490
+ TerminalSessionManager = class extends EventEmitter20 {
73526
74491
  sessions = /* @__PURE__ */ new Map();
73527
74492
  defaultTimeout = 3e4;
73528
74493
  // 30 seconds
@@ -73660,13 +74625,13 @@ var init_terminal = __esm({
73660
74625
  });
73661
74626
 
73662
74627
  // ../dashboard/src/terminal-service.ts
73663
- import { createRequire } from "node:module";
74628
+ import { createRequire as createRequire2 } from "node:module";
73664
74629
  var isBunBinary, require2;
73665
74630
  var init_terminal_service = __esm({
73666
74631
  "../dashboard/src/terminal-service.ts"() {
73667
74632
  "use strict";
73668
74633
  isBunBinary = typeof Bun !== "undefined" && !!Bun.embeddedFiles;
73669
- require2 = createRequire(import.meta.url);
74634
+ require2 = createRequire2(import.meta.url);
73670
74635
  }
73671
74636
  });
73672
74637
 
@@ -73708,60 +74673,6 @@ var init_project_store_resolver = __esm({
73708
74673
  }
73709
74674
  });
73710
74675
 
73711
- // ../dashboard/src/runtime-logger.ts
73712
- var init_runtime_logger = __esm({
73713
- "../dashboard/src/runtime-logger.ts"() {
73714
- "use strict";
73715
- }
73716
- });
73717
-
73718
- // ../dashboard/src/api-error.ts
73719
- var init_api_error = __esm({
73720
- "../dashboard/src/api-error.ts"() {
73721
- "use strict";
73722
- init_runtime_logger();
73723
- }
73724
- });
73725
-
73726
- // ../dashboard/src/mission-routes.ts
73727
- import { Router } from "express";
73728
- var init_mission_routes = __esm({
73729
- "../dashboard/src/mission-routes.ts"() {
73730
- "use strict";
73731
- init_project_store_resolver();
73732
- init_src();
73733
- init_sse_buffer();
73734
- init_api_error();
73735
- }
73736
- });
73737
-
73738
- // ../dashboard/src/roadmap-suggestions.ts
73739
- var init_roadmap_suggestions = __esm({
73740
- "../dashboard/src/roadmap-suggestions.ts"() {
73741
- "use strict";
73742
- }
73743
- });
73744
-
73745
- // ../dashboard/src/roadmap-routes.ts
73746
- import { Router as Router2 } from "express";
73747
- var init_roadmap_routes = __esm({
73748
- "../dashboard/src/roadmap-routes.ts"() {
73749
- "use strict";
73750
- init_api_error();
73751
- init_roadmap_suggestions();
73752
- init_project_store_resolver();
73753
- }
73754
- });
73755
-
73756
- // ../dashboard/src/insights-routes.ts
73757
- import { Router as Router3 } from "express";
73758
- var init_insights_routes = __esm({
73759
- "../dashboard/src/insights-routes.ts"() {
73760
- "use strict";
73761
- init_api_error();
73762
- }
73763
- });
73764
-
73765
74676
  // ../dashboard/src/ai-session-store.ts
73766
74677
  var MAX_THINKING_BYTES, SESSION_CLEANUP_DEFAULT_MAX_AGE_MS, SESSION_CLEANUP_INTERVAL_MS, diagnostics2;
73767
74678
  var init_ai_session_store = __esm({
@@ -73776,7 +74687,7 @@ var init_ai_session_store = __esm({
73776
74687
  });
73777
74688
 
73778
74689
  // ../dashboard/src/subtask-breakdown.ts
73779
- import { EventEmitter as EventEmitter20 } from "node:events";
74690
+ import { EventEmitter as EventEmitter21 } from "node:events";
73780
74691
  function cleanupInMemorySubtaskSession(sessionId) {
73781
74692
  const session = sessions2.get(sessionId);
73782
74693
  if (!session) {
@@ -73814,7 +74725,7 @@ var init_subtask_breakdown = __esm({
73814
74725
  process.on("beforeExit", () => {
73815
74726
  clearInterval(cleanupInterval3);
73816
74727
  });
73817
- SubtaskStreamManager = class extends EventEmitter20 {
74728
+ SubtaskStreamManager = class extends EventEmitter21 {
73818
74729
  constructor(bufferSize = 100) {
73819
74730
  super();
73820
74731
  this.bufferSize = bufferSize;
@@ -73936,7 +74847,7 @@ var init_agent_generation = __esm({
73936
74847
  });
73937
74848
 
73938
74849
  // ../dashboard/src/mission-interview.ts
73939
- import { EventEmitter as EventEmitter21 } from "node:events";
74850
+ import { EventEmitter as EventEmitter22 } from "node:events";
73940
74851
  function cleanupInMemoryMissionSession(sessionId) {
73941
74852
  const session = sessions4.get(sessionId);
73942
74853
  if (!session) {
@@ -73982,7 +74893,7 @@ var init_mission_interview = __esm({
73982
74893
  cleanupInterval5 = setInterval(cleanupExpiredSessions4, CLEANUP_INTERVAL_MS5);
73983
74894
  cleanupInterval5.unref?.();
73984
74895
  process.on("beforeExit", () => clearInterval(cleanupInterval5));
73985
- MissionInterviewStreamManager = class extends EventEmitter21 {
74896
+ MissionInterviewStreamManager = class extends EventEmitter22 {
73986
74897
  constructor(bufferSize = 100) {
73987
74898
  super();
73988
74899
  this.bufferSize = bufferSize;
@@ -74050,7 +74961,7 @@ var init_mission_interview = __esm({
74050
74961
  });
74051
74962
 
74052
74963
  // ../dashboard/src/milestone-slice-interview.ts
74053
- import { EventEmitter as EventEmitter22 } from "node:events";
74964
+ import { EventEmitter as EventEmitter23 } from "node:events";
74054
74965
  function cleanupInMemorySession2(sessionId) {
74055
74966
  const session = sessions5.get(sessionId);
74056
74967
  if (!session) {
@@ -74097,7 +75008,7 @@ var init_milestone_slice_interview = __esm({
74097
75008
  cleanupInterval6 = setInterval(cleanupExpiredSessions5, CLEANUP_INTERVAL_MS6);
74098
75009
  cleanupInterval6.unref?.();
74099
75010
  process.on("beforeExit", () => clearInterval(cleanupInterval6));
74100
- MilestoneSliceInterviewStreamManager = class extends EventEmitter22 {
75011
+ MilestoneSliceInterviewStreamManager = class extends EventEmitter23 {
74101
75012
  constructor(bufferSize = 100) {
74102
75013
  super();
74103
75014
  this.bufferSize = bufferSize;
@@ -74164,6 +75075,156 @@ var init_milestone_slice_interview = __esm({
74164
75075
  }
74165
75076
  });
74166
75077
 
75078
+ // ../dashboard/src/runtime-logger.ts
75079
+ var init_runtime_logger = __esm({
75080
+ "../dashboard/src/runtime-logger.ts"() {
75081
+ "use strict";
75082
+ }
75083
+ });
75084
+
75085
+ // ../dashboard/src/api-error.ts
75086
+ var init_api_error = __esm({
75087
+ "../dashboard/src/api-error.ts"() {
75088
+ "use strict";
75089
+ init_runtime_logger();
75090
+ }
75091
+ });
75092
+
75093
+ // ../dashboard/src/rate-limit.ts
75094
+ var init_rate_limit = __esm({
75095
+ "../dashboard/src/rate-limit.ts"() {
75096
+ "use strict";
75097
+ init_api_error();
75098
+ }
75099
+ });
75100
+
75101
+ // ../dashboard/src/plugin-routes.ts
75102
+ import { Router } from "express";
75103
+ var init_plugin_routes = __esm({
75104
+ "../dashboard/src/plugin-routes.ts"() {
75105
+ "use strict";
75106
+ init_src();
75107
+ init_api_error();
75108
+ }
75109
+ });
75110
+
75111
+ // ../dashboard/src/routes/context.ts
75112
+ import { Router as Router2 } from "express";
75113
+ var init_context = __esm({
75114
+ "../dashboard/src/routes/context.ts"() {
75115
+ "use strict";
75116
+ init_api_error();
75117
+ init_project_store_resolver();
75118
+ init_runtime_logger();
75119
+ }
75120
+ });
75121
+
75122
+ // ../dashboard/src/routes/register-tasks.ts
75123
+ var init_register_tasks = __esm({
75124
+ "../dashboard/src/routes/register-tasks.ts"() {
75125
+ "use strict";
75126
+ }
75127
+ });
75128
+
75129
+ // ../dashboard/src/routes/register-planning-chat.ts
75130
+ var init_register_planning_chat = __esm({
75131
+ "../dashboard/src/routes/register-planning-chat.ts"() {
75132
+ "use strict";
75133
+ }
75134
+ });
75135
+
75136
+ // ../dashboard/src/routes/register-settings-memory.ts
75137
+ var init_register_settings_memory = __esm({
75138
+ "../dashboard/src/routes/register-settings-memory.ts"() {
75139
+ "use strict";
75140
+ }
75141
+ });
75142
+
75143
+ // ../dashboard/src/routes/register-messaging-scripts.ts
75144
+ var init_register_messaging_scripts = __esm({
75145
+ "../dashboard/src/routes/register-messaging-scripts.ts"() {
75146
+ "use strict";
75147
+ init_src();
75148
+ init_api_error();
75149
+ init_terminal_service();
75150
+ }
75151
+ });
75152
+
75153
+ // ../dashboard/src/routes/register-git-github.ts
75154
+ var init_register_git_github = __esm({
75155
+ "../dashboard/src/routes/register-git-github.ts"() {
75156
+ "use strict";
75157
+ }
75158
+ });
75159
+
75160
+ // ../dashboard/src/routes/register-files-terminal-workspaces.ts
75161
+ var init_register_files_terminal_workspaces = __esm({
75162
+ "../dashboard/src/routes/register-files-terminal-workspaces.ts"() {
75163
+ "use strict";
75164
+ }
75165
+ });
75166
+
75167
+ // ../dashboard/src/routes/register-agents-projects-nodes.ts
75168
+ var init_register_agents_projects_nodes = __esm({
75169
+ "../dashboard/src/routes/register-agents-projects-nodes.ts"() {
75170
+ "use strict";
75171
+ }
75172
+ });
75173
+
75174
+ // ../dashboard/src/routes/register-plugins-automation.ts
75175
+ var init_register_plugins_automation = __esm({
75176
+ "../dashboard/src/routes/register-plugins-automation.ts"() {
75177
+ "use strict";
75178
+ }
75179
+ });
75180
+
75181
+ // ../dashboard/src/routes/register-proxy.ts
75182
+ var init_register_proxy = __esm({
75183
+ "../dashboard/src/routes/register-proxy.ts"() {
75184
+ "use strict";
75185
+ init_api_error();
75186
+ }
75187
+ });
75188
+
75189
+ // ../dashboard/src/mission-routes.ts
75190
+ import { Router as Router3 } from "express";
75191
+ var init_mission_routes = __esm({
75192
+ "../dashboard/src/mission-routes.ts"() {
75193
+ "use strict";
75194
+ init_project_store_resolver();
75195
+ init_src();
75196
+ init_sse_buffer();
75197
+ init_api_error();
75198
+ }
75199
+ });
75200
+
75201
+ // ../dashboard/src/roadmap-suggestions.ts
75202
+ var init_roadmap_suggestions = __esm({
75203
+ "../dashboard/src/roadmap-suggestions.ts"() {
75204
+ "use strict";
75205
+ }
75206
+ });
75207
+
75208
+ // ../dashboard/src/roadmap-routes.ts
75209
+ import { Router as Router4 } from "express";
75210
+ var init_roadmap_routes = __esm({
75211
+ "../dashboard/src/roadmap-routes.ts"() {
75212
+ "use strict";
75213
+ init_api_error();
75214
+ init_roadmap_suggestions();
75215
+ init_project_store_resolver();
75216
+ }
75217
+ });
75218
+
75219
+ // ../dashboard/src/insights-routes.ts
75220
+ import { Router as Router5 } from "express";
75221
+ var init_insights_routes = __esm({
75222
+ "../dashboard/src/insights-routes.ts"() {
75223
+ "use strict";
75224
+ init_api_error();
75225
+ }
75226
+ });
75227
+
74167
75228
  // ../dashboard/src/dev-server-detect.ts
74168
75229
  var init_dev_server_detect = __esm({
74169
75230
  "../dashboard/src/dev-server-detect.ts"() {
@@ -74197,7 +75258,7 @@ var init_dev_server_process = __esm({
74197
75258
  });
74198
75259
 
74199
75260
  // ../dashboard/src/dev-server-routes.ts
74200
- import { Router as Router4 } from "express";
75261
+ import { Router as Router6 } from "express";
74201
75262
  var init_dev_server_routes = __esm({
74202
75263
  "../dashboard/src/dev-server-routes.ts"() {
74203
75264
  "use strict";
@@ -74208,26 +75269,18 @@ var init_dev_server_routes = __esm({
74208
75269
  }
74209
75270
  });
74210
75271
 
74211
- // ../dashboard/src/rate-limit.ts
74212
- var init_rate_limit = __esm({
74213
- "../dashboard/src/rate-limit.ts"() {
74214
- "use strict";
74215
- init_api_error();
74216
- }
74217
- });
74218
-
74219
- // ../dashboard/src/plugin-routes.ts
74220
- import { Router as Router5 } from "express";
74221
- var init_plugin_routes = __esm({
74222
- "../dashboard/src/plugin-routes.ts"() {
75272
+ // ../dashboard/src/routes/register-integrated-routers.ts
75273
+ var init_register_integrated_routers = __esm({
75274
+ "../dashboard/src/routes/register-integrated-routers.ts"() {
74223
75275
  "use strict";
74224
- init_src();
74225
- init_api_error();
75276
+ init_mission_routes();
75277
+ init_roadmap_routes();
75278
+ init_insights_routes();
75279
+ init_dev_server_routes();
74226
75280
  }
74227
75281
  });
74228
75282
 
74229
75283
  // ../dashboard/src/routes.ts
74230
- import { Router as Router6 } from "express";
74231
75284
  import multer from "multer";
74232
75285
  import * as fsPromises from "node:fs/promises";
74233
75286
  import { execFile as execFile3 } from "node:child_process";
@@ -74256,9 +75309,6 @@ var init_routes = __esm({
74256
75309
  init_file_service();
74257
75310
  init_usage();
74258
75311
  init_github_webhooks();
74259
- init_mission_routes();
74260
- init_roadmap_routes();
74261
- init_insights_routes();
74262
75312
  init_project_store_resolver();
74263
75313
  init_ai_session_store();
74264
75314
  init_planning();
@@ -74267,13 +75317,23 @@ var init_routes = __esm({
74267
75317
  init_mission_interview();
74268
75318
  init_milestone_slice_interview();
74269
75319
  init_sse_buffer();
74270
- init_dev_server_routes();
74271
75320
  init_api_error();
74272
75321
  init_rate_limit();
74273
75322
  init_plugin_routes();
74274
75323
  init_auth_paths();
74275
75324
  init_runtime_logger();
74276
75325
  init_ai_session_diagnostics();
75326
+ init_context();
75327
+ init_register_tasks();
75328
+ init_register_planning_chat();
75329
+ init_register_settings_memory();
75330
+ init_register_messaging_scripts();
75331
+ init_register_git_github();
75332
+ init_register_files_terminal_workspaces();
75333
+ init_register_agents_projects_nodes();
75334
+ init_register_plugins_automation();
75335
+ init_register_proxy();
75336
+ init_register_integrated_routers();
74277
75337
  ({
74278
75338
  mkdtemp,
74279
75339
  access: access3,
@@ -76517,7 +77577,7 @@ var require_extension = __commonJS({
76517
77577
  var require_websocket = __commonJS({
76518
77578
  "../../node_modules/.pnpm/ws@8.20.0/node_modules/ws/lib/websocket.js"(exports, module) {
76519
77579
  "use strict";
76520
- var EventEmitter24 = __require("events");
77580
+ var EventEmitter25 = __require("events");
76521
77581
  var https = __require("https");
76522
77582
  var http = __require("http");
76523
77583
  var net = __require("net");
@@ -76549,7 +77609,7 @@ var require_websocket = __commonJS({
76549
77609
  var protocolVersions = [8, 13];
76550
77610
  var readyStates = ["CONNECTING", "OPEN", "CLOSING", "CLOSED"];
76551
77611
  var subprotocolRegex = /^[!#$%&'*+\-.0-9A-Z^_`|a-z~]+$/;
76552
- var WebSocket2 = class _WebSocket extends EventEmitter24 {
77612
+ var WebSocket2 = class _WebSocket extends EventEmitter25 {
76553
77613
  /**
76554
77614
  * Create a new `WebSocket`.
76555
77615
  *
@@ -77546,7 +78606,7 @@ var require_subprotocol = __commonJS({
77546
78606
  var require_websocket_server = __commonJS({
77547
78607
  "../../node_modules/.pnpm/ws@8.20.0/node_modules/ws/lib/websocket-server.js"(exports, module) {
77548
78608
  "use strict";
77549
- var EventEmitter24 = __require("events");
78609
+ var EventEmitter25 = __require("events");
77550
78610
  var http = __require("http");
77551
78611
  var { Duplex } = __require("stream");
77552
78612
  var { createHash: createHash5 } = __require("crypto");
@@ -77559,7 +78619,7 @@ var require_websocket_server = __commonJS({
77559
78619
  var RUNNING = 0;
77560
78620
  var CLOSING = 1;
77561
78621
  var CLOSED = 2;
77562
- var WebSocketServer2 = class extends EventEmitter24 {
78622
+ var WebSocketServer2 = class extends EventEmitter25 {
77563
78623
  /**
77564
78624
  * Create a `WebSocketServer` instance.
77565
78625
  *
@@ -77975,7 +79035,7 @@ var init_terminal_websocket_diagnostics = __esm({
77975
79035
  });
77976
79036
 
77977
79037
  // ../dashboard/src/chat.ts
77978
- import { EventEmitter as EventEmitter23 } from "node:events";
79038
+ import { EventEmitter as EventEmitter24 } from "node:events";
77979
79039
  var defaultDiagnostics, _diagnostics, diagnostics7, RATE_LIMIT_WINDOW_MS6, MAX_REFERENCED_FILE_SIZE, ChatStreamManager, chatStreamManager;
77980
79040
  var init_chat = __esm({
77981
79041
  "../dashboard/src/chat.ts"() {
@@ -78007,7 +79067,7 @@ var init_chat = __esm({
78007
79067
  };
78008
79068
  RATE_LIMIT_WINDOW_MS6 = 60 * 1e3;
78009
79069
  MAX_REFERENCED_FILE_SIZE = 50 * 1024;
78010
- ChatStreamManager = class extends EventEmitter23 {
79070
+ ChatStreamManager = class extends EventEmitter24 {
78011
79071
  constructor(bufferSize = 100) {
78012
79072
  super();
78013
79073
  this.bufferSize = bufferSize;
@@ -78199,6 +79259,7 @@ var init_src3 = __esm({
78199
79259
  "../dashboard/src/index.ts"() {
78200
79260
  "use strict";
78201
79261
  init_server();
79262
+ init_dev_server_routes();
78202
79263
  init_runtime_logger();
78203
79264
  init_skills_adapter();
78204
79265
  init_github();