@runfusion/fusion 0.18.0 → 0.19.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/dist/bin.js +1840 -716
  2. package/dist/client/assets/{ChatView-BomXmqar.js → ChatView-DEG93wpC.js} +1 -1
  3. package/dist/client/assets/{DevServerView-yFvF4xL4.js → DevServerView-DI71QIND.js} +1 -1
  4. package/dist/client/assets/{DirectoryPicker-BDNodhtF.js → DirectoryPicker-6eBfMR3k.js} +1 -1
  5. package/dist/client/assets/{DocumentsView-CAWtDEaL.js → DocumentsView-D9pxwmaa.js} +1 -1
  6. package/dist/client/assets/{InsightsView-CDkiJeW1.js → InsightsView-D2_XwizY.js} +1 -1
  7. package/dist/client/assets/{MemoryView-ZRQ9EL9H.js → MemoryView-DfjllRpZ.js} +1 -1
  8. package/dist/client/assets/{NodesView-DosrOyeH.js → NodesView-D7hWWUCW.js} +1 -1
  9. package/dist/client/assets/{PiExtensionsManager-CzZ1LEpz.js → PiExtensionsManager-d8cJKjcL.js} +2 -2
  10. package/dist/client/assets/{PluginManager-Dp3vPsMO.js → PluginManager-CNzhmPzJ.js} +1 -1
  11. package/dist/client/assets/{ResearchView-PvNkdaQE.js → ResearchView-2xAa3pzZ.js} +1 -1
  12. package/dist/client/assets/{RoadmapsView-BUW-HJz5.js → RoadmapsView-ajwwf979.js} +1 -1
  13. package/dist/client/assets/SettingsModal-BWe0KrGY.css +1 -0
  14. package/dist/client/assets/{SettingsModal-ByVl_fUi.js → SettingsModal-D732WMft.js} +1 -1
  15. package/dist/client/assets/SettingsModal-Dk0zKdTy.js +31 -0
  16. package/dist/client/assets/{SetupWizardModal-DH1hpyiP.js → SetupWizardModal-DohGTvQT.js} +1 -1
  17. package/dist/client/assets/{SkillsView-B-RqQSFE.js → SkillsView-CzVO7yTO.js} +1 -1
  18. package/dist/client/assets/index-CVCt2pCH.css +1 -0
  19. package/dist/client/assets/index-hnO5QagU.js +1239 -0
  20. package/dist/client/assets/{users-WyHhw14V.js → users-R3_m9pE5.js} +1 -1
  21. package/dist/client/index.html +2 -2
  22. package/dist/client/version.json +1 -1
  23. package/dist/droid-cli/package.json +1 -1
  24. package/dist/droid-cli/src/__tests__/provider.test.ts +1 -1
  25. package/dist/extension.js +627 -141
  26. package/dist/pi-claude-cli/package.json +1 -1
  27. package/dist/plugins/fusion-plugin-dependency-graph/package.json +1 -1
  28. package/package.json +3 -2
  29. package/skill/fusion/references/engine-tools.md +2 -0
  30. package/dist/client/assets/SettingsModal-oOnIed5O.css +0 -1
  31. package/dist/client/assets/SettingsModal-uzo470XS.js +0 -31
  32. package/dist/client/assets/index-CtiRbTNv.js +0 -1229
  33. package/dist/client/assets/index-Dy-xC2C2.css +0 -1
package/dist/extension.js CHANGED
@@ -91,6 +91,7 @@ var init_settings_schema = __esm({
91
91
  modelOnboardingComplete: void 0,
92
92
  useClaudeCli: void 0,
93
93
  useDroidCli: void 0,
94
+ useLlamaCpp: void 0,
94
95
  // Global baseline lanes for per-role model selection
95
96
  executionGlobalProvider: void 0,
96
97
  executionGlobalModelId: void 0,
@@ -2771,7 +2772,7 @@ var init_db = __esm({
2771
2772
  "use strict";
2772
2773
  init_sqlite_adapter();
2773
2774
  init_types();
2774
- SCHEMA_VERSION = 60;
2775
+ SCHEMA_VERSION = 61;
2775
2776
  SCHEMA_SQL = `
2776
2777
  -- Tasks table with JSON columns for nested data
2777
2778
  CREATE TABLE IF NOT EXISTS tasks (
@@ -4643,6 +4644,21 @@ This means a caller passed a .fusion directory where a project root was expected
4643
4644
  this.db.exec(`CREATE INDEX IF NOT EXISTS idxTasksPausedByAgentId ON tasks(pausedByAgentId)`);
4644
4645
  });
4645
4646
  }
4647
+ if (version < 61) {
4648
+ this.applyMigration(61, () => {
4649
+ this.db.exec(`
4650
+ CREATE TABLE IF NOT EXISTS verification_cache (
4651
+ treeSha TEXT NOT NULL,
4652
+ testCommand TEXT NOT NULL DEFAULT '',
4653
+ buildCommand TEXT NOT NULL DEFAULT '',
4654
+ recordedAt TEXT NOT NULL,
4655
+ taskId TEXT,
4656
+ PRIMARY KEY (treeSha, testCommand, buildCommand)
4657
+ )
4658
+ `);
4659
+ this.db.exec(`CREATE INDEX IF NOT EXISTS idxVerificationCacheRecordedAt ON verification_cache(recordedAt)`);
4660
+ });
4661
+ }
4646
4662
  }
4647
4663
  /**
4648
4664
  * Run a single migration step inside a transaction and bump the version.
@@ -5078,6 +5094,33 @@ var init_agent_store = __esm({
5078
5094
  `).run(migrationKey, migrationVersion);
5079
5095
  this.db.bumpLastModified();
5080
5096
  }
5097
+ /**
5098
+ * Find the first non-ephemeral agent by exact name.
5099
+ *
5100
+ * Ephemeral task-worker/spawned agents are excluded so callers can use this
5101
+ * for durable identity checks without transient runtime workers conflicting.
5102
+ *
5103
+ * @param name - Agent name to match exactly
5104
+ * @returns Matching non-ephemeral agent, or null when none exists
5105
+ */
5106
+ async findAgentByName(name) {
5107
+ const rows = this.db.prepare("SELECT * FROM agents WHERE name = ? ORDER BY createdAt DESC").all(name);
5108
+ for (const row of rows) {
5109
+ const agent = this.mapAgentRow(row);
5110
+ if (!isEphemeralAgent(agent)) {
5111
+ return agent;
5112
+ }
5113
+ }
5114
+ return null;
5115
+ }
5116
+ async hasNonEphemeralAgentWithName(name) {
5117
+ const normalizedName = name.trim();
5118
+ if (!normalizedName) {
5119
+ return false;
5120
+ }
5121
+ const existing = await this.findAgentByName(normalizedName);
5122
+ return existing !== null;
5123
+ }
5081
5124
  /**
5082
5125
  * Create a new agent with "idle" state.
5083
5126
  *
@@ -5090,9 +5133,12 @@ var init_agent_store = __esm({
5090
5133
  * same default (1h) to both at runtime. Writing the default explicitly
5091
5134
  * removes that divergence and keeps the persisted config truthful.
5092
5135
  *
5136
+ * Also enforces non-ephemeral name uniqueness: durable agents cannot share a
5137
+ * name, while ephemeral task-worker agents are allowed to duplicate names.
5138
+ *
5093
5139
  * @param input - Creation parameters
5094
5140
  * @returns The created agent
5095
- * @throws Error if input is invalid
5141
+ * @throws Error if input is invalid or a duplicate non-ephemeral name exists
5096
5142
  */
5097
5143
  async createAgent(input) {
5098
5144
  if (!input.name?.trim()) {
@@ -5101,15 +5147,22 @@ var init_agent_store = __esm({
5101
5147
  if (!input.role) {
5102
5148
  throw new Error("Agent role is required");
5103
5149
  }
5150
+ const normalizedName = input.name.trim();
5151
+ const metadata = input.metadata ?? {};
5152
+ const ephemeral = isEphemeralAgent({ metadata, name: input.name, role: input.role, reportsTo: input.reportsTo });
5153
+ if (!ephemeral) {
5154
+ const existing = await this.findAgentByName(normalizedName);
5155
+ if (existing) {
5156
+ throw new Error(`Agent with name "${normalizedName}" already exists (agentId: ${existing.id})`);
5157
+ }
5158
+ }
5104
5159
  const now = (/* @__PURE__ */ new Date()).toISOString();
5105
5160
  const agentId = `agent-${randomUUID().slice(0, 8)}`;
5106
- const metadata = input.metadata ?? {};
5107
5161
  const runtimeConfig = resolveCreationRuntimeConfig(input.runtimeConfig, metadata);
5108
- const ephemeral = isEphemeralAgent({ metadata, name: input.name, role: input.role, reportsTo: input.reportsTo });
5109
5162
  const resolvedHeartbeatProcedurePath = input.heartbeatProcedurePath ?? (ephemeral ? void 0 : getDefaultHeartbeatProcedurePath(agentId, input.name));
5110
5163
  const agent = {
5111
5164
  id: agentId,
5112
- name: input.name.trim(),
5165
+ name: normalizedName,
5113
5166
  role: input.role,
5114
5167
  state: "idle",
5115
5168
  createdAt: now,
@@ -10549,50 +10602,40 @@ ${feature.acceptanceCriteria}`);
10549
10602
  }
10550
10603
  }
10551
10604
  // ── ID Generators ───────────────────────────────────────────────────
10552
- generateMissionId() {
10553
- const timestamp = Date.now();
10605
+ idSequence = 0;
10606
+ generateId(prefix) {
10607
+ const timestamp = Date.now().toString(36).toUpperCase();
10608
+ this.idSequence += 1;
10609
+ const sequence = this.idSequence.toString(36).toUpperCase().padStart(4, "0");
10554
10610
  const random = Math.random().toString(36).substring(2, 6).toUpperCase();
10555
- return `M-${timestamp.toString(36).toUpperCase()}-${random}`;
10611
+ return `${prefix}-${timestamp}-${sequence}-${random}`;
10612
+ }
10613
+ generateMissionId() {
10614
+ return this.generateId("M");
10556
10615
  }
10557
10616
  generateMilestoneId() {
10558
- const timestamp = Date.now();
10559
- const random = Math.random().toString(36).substring(2, 6).toUpperCase();
10560
- return `MS-${timestamp.toString(36).toUpperCase()}-${random}`;
10617
+ return this.generateId("MS");
10561
10618
  }
10562
10619
  generateSliceId() {
10563
- const timestamp = Date.now();
10564
- const random = Math.random().toString(36).substring(2, 6).toUpperCase();
10565
- return `SL-${timestamp.toString(36).toUpperCase()}-${random}`;
10620
+ return this.generateId("SL");
10566
10621
  }
10567
10622
  generateFeatureId() {
10568
- const timestamp = Date.now();
10569
- const random = Math.random().toString(36).substring(2, 6).toUpperCase();
10570
- return `F-${timestamp.toString(36).toUpperCase()}-${random}`;
10623
+ return this.generateId("F");
10571
10624
  }
10572
10625
  generateMissionEventId() {
10573
- const timestamp = Date.now();
10574
- const random = Math.random().toString(36).substring(2, 6).toUpperCase();
10575
- return `ME-${timestamp.toString(36).toUpperCase()}-${random}`;
10626
+ return this.generateId("ME");
10576
10627
  }
10577
10628
  generateAssertionId() {
10578
- const timestamp = Date.now();
10579
- const random = Math.random().toString(36).substring(2, 6).toUpperCase();
10580
- return `CA-${timestamp.toString(36).toUpperCase()}-${random}`;
10629
+ return this.generateId("CA");
10581
10630
  }
10582
10631
  generateValidatorRunId() {
10583
- const timestamp = Date.now();
10584
- const random = Math.random().toString(36).substring(2, 6).toUpperCase();
10585
- return `VR-${timestamp.toString(36).toUpperCase()}-${random}`;
10632
+ return this.generateId("VR");
10586
10633
  }
10587
10634
  generateFailureId() {
10588
- const timestamp = Date.now();
10589
- const random = Math.random().toString(36).substring(2, 6).toUpperCase();
10590
- return `VF-${timestamp.toString(36).toUpperCase()}-${random}`;
10635
+ return this.generateId("VF");
10591
10636
  }
10592
10637
  generateLineageId() {
10593
- const timestamp = Date.now();
10594
- const random = Math.random().toString(36).substring(2, 6).toUpperCase();
10595
- return `FL-${timestamp.toString(36).toUpperCase()}-${random}`;
10638
+ return this.generateId("FL");
10596
10639
  }
10597
10640
  };
10598
10641
  }
@@ -19467,10 +19510,10 @@ var init_central_core = __esm({
19467
19510
  */
19468
19511
  async generateProjectName(projectPath) {
19469
19512
  try {
19470
- const { execFile: execFile7 } = await import("node:child_process");
19513
+ const { execFile: execFile8 } = await import("node:child_process");
19471
19514
  const { promisify: promisify14 } = await import("node:util");
19472
- const execFileAsync5 = promisify14(execFile7);
19473
- const { stdout } = await execFileAsync5(
19515
+ const execFileAsync6 = promisify14(execFile8);
19516
+ const { stdout } = await execFileAsync6(
19474
19517
  "git",
19475
19518
  ["remote", "get-url", "origin"],
19476
19519
  { cwd: projectPath, timeout: 5e3 }
@@ -20138,10 +20181,10 @@ var init_migration = __esm({
20138
20181
  return basename3(projectPath);
20139
20182
  }
20140
20183
  try {
20141
- const { execFile: execFile7 } = await import("node:child_process");
20184
+ const { execFile: execFile8 } = await import("node:child_process");
20142
20185
  const { promisify: promisify14 } = await import("node:util");
20143
- const execFileAsync5 = promisify14(execFile7);
20144
- const { stdout } = await execFileAsync5(
20186
+ const execFileAsync6 = promisify14(execFile8);
20187
+ const { stdout } = await execFileAsync6(
20145
20188
  "git",
20146
20189
  ["remote", "get-url", "origin"],
20147
20190
  { cwd: projectPath, timeout: 1e3 }
@@ -30845,13 +30888,13 @@ async function searchWithQmd(rootDir, options) {
30845
30888
  const command = "qmd";
30846
30889
  const limit = Math.max(1, Math.min(options.limit ?? 5, 20));
30847
30890
  try {
30848
- const { execFile: execFile7 } = await import("node:child_process");
30891
+ const { execFile: execFile8 } = await import("node:child_process");
30849
30892
  const { promisify: promisify14 } = await import("node:util");
30850
- const execFileAsync5 = promisify14(execFile7);
30851
- await ensureQmdProjectMemoryCollection(rootDir, execFileAsync5);
30893
+ const execFileAsync6 = promisify14(execFile8);
30894
+ await ensureQmdProjectMemoryCollection(rootDir, execFileAsync6);
30852
30895
  scheduleQmdProjectMemoryRefresh(rootDir);
30853
30896
  const args = buildQmdSearchArgs(rootDir, options);
30854
- const { stdout } = await execFileAsync5(command, args, {
30897
+ const { stdout } = await execFileAsync6(command, args, {
30855
30898
  cwd: rootDir,
30856
30899
  timeout: 4e3,
30857
30900
  maxBuffer: 1024 * 1024
@@ -30876,12 +30919,12 @@ async function searchWithQmd(rootDir, options) {
30876
30919
  return [];
30877
30920
  }
30878
30921
  }
30879
- async function ensureQmdProjectMemoryCollection(rootDir, execFileAsync5) {
30922
+ async function ensureQmdProjectMemoryCollection(rootDir, execFileAsync6) {
30880
30923
  const collectionName = qmdMemoryCollectionName(rootDir);
30881
30924
  const memoryDir = memoryWorkspacePath(rootDir);
30882
30925
  await mkdir6(memoryDir, { recursive: true });
30883
30926
  try {
30884
- await execFileAsync5("qmd", buildQmdCollectionAddArgs(rootDir), {
30927
+ await execFileAsync6("qmd", buildQmdCollectionAddArgs(rootDir), {
30885
30928
  cwd: rootDir,
30886
30929
  timeout: 4e3,
30887
30930
  maxBuffer: 512 * 1024
@@ -30897,9 +30940,9 @@ ${stderr}`)) {
30897
30940
  return collectionName;
30898
30941
  }
30899
30942
  async function getDefaultExecFileAsync() {
30900
- const { execFile: execFile7 } = await import("node:child_process");
30943
+ const { execFile: execFile8 } = await import("node:child_process");
30901
30944
  const { promisify: promisify14 } = await import("node:util");
30902
- return promisify14(execFile7);
30945
+ return promisify14(execFile8);
30903
30946
  }
30904
30947
  async function refreshQmdProjectMemoryIndex(rootDir, options) {
30905
30948
  const key = resolve6(rootDir);
@@ -30914,14 +30957,14 @@ async function refreshQmdProjectMemoryIndex(rootDir, options) {
30914
30957
  }
30915
30958
  }
30916
30959
  const promise = (async () => {
30917
- const execFileAsync5 = options?.execFileAsync ?? await getDefaultExecFileAsync();
30918
- await ensureQmdProjectMemoryCollection(rootDir, execFileAsync5);
30919
- await execFileAsync5("qmd", ["update"], {
30960
+ const execFileAsync6 = options?.execFileAsync ?? await getDefaultExecFileAsync();
30961
+ await ensureQmdProjectMemoryCollection(rootDir, execFileAsync6);
30962
+ await execFileAsync6("qmd", ["update"], {
30920
30963
  cwd: rootDir,
30921
30964
  timeout: 3e4,
30922
30965
  maxBuffer: 1024 * 1024
30923
30966
  });
30924
- await execFileAsync5("qmd", ["embed"], {
30967
+ await execFileAsync6("qmd", ["embed"], {
30925
30968
  cwd: rootDir,
30926
30969
  timeout: 12e4,
30927
30970
  maxBuffer: 1024 * 1024
@@ -30946,8 +30989,8 @@ function scheduleQmdProjectMemoryRefresh(rootDir) {
30946
30989
  }
30947
30990
  async function isQmdAvailable() {
30948
30991
  try {
30949
- const execFileAsync5 = await getDefaultExecFileAsync();
30950
- await execFileAsync5("qmd", ["--help"], {
30992
+ const execFileAsync6 = await getDefaultExecFileAsync();
30993
+ await execFileAsync6("qmd", ["--help"], {
30951
30994
  timeout: 3e3,
30952
30995
  maxBuffer: 128 * 1024
30953
30996
  });
@@ -30957,12 +31000,12 @@ async function isQmdAvailable() {
30957
31000
  }
30958
31001
  }
30959
31002
  async function installQmd(options) {
30960
- const execFileAsync5 = options?.execFileAsync ?? await getDefaultExecFileAsync();
31003
+ const execFileAsync6 = options?.execFileAsync ?? await getDefaultExecFileAsync();
30961
31004
  const [command, ...args] = QMD_INSTALL_COMMAND.split(" ");
30962
31005
  if (!command || args.length === 0) {
30963
31006
  throw new MemoryBackendError("BACKEND_UNAVAILABLE", "qmd install command is not configured", "qmd");
30964
31007
  }
30965
- await execFileAsync5(command, args, {
31008
+ await execFileAsync6(command, args, {
30966
31009
  timeout: 12e4,
30967
31010
  maxBuffer: 1024 * 1024
30968
31011
  });
@@ -37222,6 +37265,42 @@ ${notificationsSection}`;
37222
37265
  }
37223
37266
  return this.todoStore;
37224
37267
  }
37268
+ // ── Verification Cache ────────────────────────────────────────────────────
37269
+ /**
37270
+ * Look up a previously recorded verification cache pass for a given tree sha
37271
+ * and command pair. Returns null when no cached pass exists.
37272
+ *
37273
+ * @param treeSha - The git tree SHA of the merged commit.
37274
+ * @param testCommand - The test command string (normalized to empty string when absent).
37275
+ * @param buildCommand - The build command string (normalized to empty string when absent).
37276
+ */
37277
+ getVerificationCacheHit(treeSha, testCommand, buildCommand2) {
37278
+ const normalizedTest = testCommand ?? "";
37279
+ const normalizedBuild = buildCommand2 ?? "";
37280
+ const row = this.db.prepare(
37281
+ `SELECT recordedAt, taskId FROM verification_cache
37282
+ WHERE treeSha = ? AND testCommand = ? AND buildCommand = ?`
37283
+ ).get(treeSha, normalizedTest, normalizedBuild);
37284
+ return row ?? null;
37285
+ }
37286
+ /**
37287
+ * Record a successful verification pass for the given tree sha and commands.
37288
+ * Uses INSERT OR REPLACE so a re-run of the same tree updates the timestamp.
37289
+ *
37290
+ * @param treeSha - The git tree SHA of the merged commit.
37291
+ * @param testCommand - The test command string (normalized to empty string when absent).
37292
+ * @param buildCommand - The build command string (normalized to empty string when absent).
37293
+ * @param taskId - The task ID that triggered the pass (for telemetry).
37294
+ */
37295
+ recordVerificationCachePass(treeSha, testCommand, buildCommand2, taskId) {
37296
+ const normalizedTest = testCommand ?? "";
37297
+ const normalizedBuild = buildCommand2 ?? "";
37298
+ const recordedAt = (/* @__PURE__ */ new Date()).toISOString();
37299
+ this.db.prepare(
37300
+ `INSERT OR REPLACE INTO verification_cache (treeSha, testCommand, buildCommand, recordedAt, taskId)
37301
+ VALUES (?, ?, ?, ?, ?)`
37302
+ ).run(treeSha, normalizedTest, normalizedBuild, recordedAt, taskId);
37303
+ }
37225
37304
  // ── Backward Compatibility (Multi-Project Support) ────────────────────────
37226
37305
  };
37227
37306
  }
@@ -40589,7 +40668,7 @@ var init_docker_provisioning = __esm({
40589
40668
  });
40590
40669
 
40591
40670
  // ../core/src/memory-insights.ts
40592
- import { readFile as readFile11, writeFile as writeFile8, mkdir as mkdir9 } from "node:fs/promises";
40671
+ import { readFile as readFile11, writeFile as writeFile8, mkdir as mkdir9, unlink as unlink5 } from "node:fs/promises";
40593
40672
  import { existsSync as existsSync17 } from "node:fs";
40594
40673
  import { dirname as dirname7, join as join19 } from "node:path";
40595
40674
  async function readWorkingMemory(rootDir) {
@@ -40599,7 +40678,35 @@ async function readWorkingMemory(rootDir) {
40599
40678
  }
40600
40679
  return readFile11(filePath, "utf-8");
40601
40680
  }
40681
+ async function migrateLegacyArtifactIfNeeded(rootDir, canonicalPath, legacyPath) {
40682
+ const canonicalFilePath = join19(rootDir, canonicalPath);
40683
+ const legacyFilePath = join19(rootDir, legacyPath);
40684
+ if (existsSync17(canonicalFilePath) || !existsSync17(legacyFilePath)) {
40685
+ return;
40686
+ }
40687
+ const content = await readFile11(legacyFilePath, "utf-8");
40688
+ const canonicalDir = dirname7(canonicalFilePath);
40689
+ if (!existsSync17(canonicalDir)) {
40690
+ await mkdir9(canonicalDir, { recursive: true });
40691
+ }
40692
+ await writeFile8(canonicalFilePath, content, "utf-8");
40693
+ try {
40694
+ await unlink5(legacyFilePath);
40695
+ } catch {
40696
+ }
40697
+ }
40698
+ async function removeLegacyArtifactIfPresent(rootDir, legacyPath) {
40699
+ const legacyFilePath = join19(rootDir, legacyPath);
40700
+ if (!existsSync17(legacyFilePath)) {
40701
+ return;
40702
+ }
40703
+ try {
40704
+ await unlink5(legacyFilePath);
40705
+ } catch {
40706
+ }
40707
+ }
40602
40708
  async function readInsightsMemory(rootDir) {
40709
+ await migrateLegacyArtifactIfNeeded(rootDir, MEMORY_INSIGHTS_PATH, LEGACY_MEMORY_INSIGHTS_PATH);
40603
40710
  const filePath = join19(rootDir, MEMORY_INSIGHTS_PATH);
40604
40711
  if (!existsSync17(filePath)) {
40605
40712
  return null;
@@ -40608,11 +40715,12 @@ async function readInsightsMemory(rootDir) {
40608
40715
  }
40609
40716
  async function writeInsightsMemory(rootDir, content) {
40610
40717
  const filePath = join19(rootDir, MEMORY_INSIGHTS_PATH);
40611
- const dir = join19(rootDir, ".fusion");
40718
+ const dir = dirname7(filePath);
40612
40719
  if (!existsSync17(dir)) {
40613
40720
  await mkdir9(dir, { recursive: true });
40614
40721
  }
40615
40722
  await writeFile8(filePath, content, "utf-8");
40723
+ await removeLegacyArtifactIfPresent(rootDir, LEGACY_MEMORY_INSIGHTS_PATH);
40616
40724
  }
40617
40725
  async function writeWorkingMemory(rootDir, content) {
40618
40726
  const filePath = join19(rootDir, MEMORY_WORKING_PATH);
@@ -40623,6 +40731,7 @@ async function writeWorkingMemory(rootDir, content) {
40623
40731
  await writeFile8(filePath, content, "utf-8");
40624
40732
  }
40625
40733
  async function readMemoryAudit(rootDir) {
40734
+ await migrateLegacyArtifactIfNeeded(rootDir, MEMORY_AUDIT_PATH, LEGACY_MEMORY_AUDIT_PATH);
40626
40735
  const filePath = join19(rootDir, MEMORY_AUDIT_PATH);
40627
40736
  if (!existsSync17(filePath)) {
40628
40737
  return null;
@@ -40631,13 +40740,15 @@ async function readMemoryAudit(rootDir) {
40631
40740
  }
40632
40741
  async function writeMemoryAudit(rootDir, content) {
40633
40742
  const filePath = join19(rootDir, MEMORY_AUDIT_PATH);
40634
- const dir = join19(rootDir, ".fusion");
40743
+ const dir = dirname7(filePath);
40635
40744
  if (!existsSync17(dir)) {
40636
40745
  await mkdir9(dir, { recursive: true });
40637
40746
  }
40638
40747
  await writeFile8(filePath, content, "utf-8");
40748
+ await removeLegacyArtifactIfPresent(rootDir, LEGACY_MEMORY_AUDIT_PATH);
40639
40749
  }
40640
40750
  async function readMemoryAuditState(rootDir) {
40751
+ await migrateLegacyArtifactIfNeeded(rootDir, MEMORY_AUDIT_STATE_PATH, LEGACY_MEMORY_AUDIT_STATE_PATH);
40641
40752
  const filePath = join19(rootDir, MEMORY_AUDIT_STATE_PATH);
40642
40753
  if (!existsSync17(filePath)) {
40643
40754
  return null;
@@ -40658,11 +40769,12 @@ async function readMemoryAuditState(rootDir) {
40658
40769
  }
40659
40770
  async function writeMemoryAuditState(rootDir, state) {
40660
40771
  const filePath = join19(rootDir, MEMORY_AUDIT_STATE_PATH);
40661
- const dir = join19(rootDir, ".fusion");
40772
+ const dir = dirname7(filePath);
40662
40773
  if (!existsSync17(dir)) {
40663
40774
  await mkdir9(dir, { recursive: true });
40664
40775
  }
40665
40776
  await writeFile8(filePath, JSON.stringify(state, null, 2), "utf-8");
40777
+ await removeLegacyArtifactIfPresent(rootDir, LEGACY_MEMORY_AUDIT_STATE_PATH);
40666
40778
  }
40667
40779
  function isValidExtractionMetadata(value) {
40668
40780
  if (!value || typeof value !== "object") {
@@ -40937,7 +41049,7 @@ function createInsightExtractionAutomation(settings, modelProvider, modelId) {
40937
41049
  ## Instructions
40938
41050
 
40939
41051
  1. Read the working memory file at \`.fusion/memory/MEMORY.md\` using your file reading tools
40940
- 2. Read the existing insights file at \`.fusion/memory-insights.md\` (it may not exist yet)
41052
+ 2. Read the existing insights file at \`.fusion/memory/memory-insights.md\` (it may not exist yet)
40941
41053
  3. Analyze the working memory content and identify:
40942
41054
  a) **New insights** that should be preserved in long-term memory
40943
41055
  b) **Durable content** that should remain in working memory
@@ -41241,7 +41353,7 @@ async function generateMemoryAudit(rootDir, lastExtraction, pruningOutcome) {
41241
41353
  id: "insights-memory-exists",
41242
41354
  name: "Insights memory file exists",
41243
41355
  passed: false,
41244
- details: "File .fusion/memory-insights.md does not exist yet"
41356
+ details: "File .fusion/memory/memory-insights.md does not exist yet"
41245
41357
  });
41246
41358
  }
41247
41359
  if (insightsMemoryExists) {
@@ -41487,14 +41599,17 @@ async function processAndAuditInsightExtraction(rootDir, input) {
41487
41599
  }
41488
41600
  return auditReport;
41489
41601
  }
41490
- var MEMORY_WORKING_PATH, MEMORY_INSIGHTS_PATH, MEMORY_AUDIT_PATH, MEMORY_AUDIT_STATE_PATH, DEFAULT_INSIGHT_SCHEDULE, DEFAULT_MIN_INTERVAL_MS, MIN_INSIGHT_GROWTH_CHARS, INSIGHT_EXTRACTION_SCHEDULE_NAME, REQUIRED_MEMORY_SECTIONS;
41602
+ var MEMORY_WORKING_PATH, MEMORY_INSIGHTS_PATH, MEMORY_AUDIT_PATH, MEMORY_AUDIT_STATE_PATH, LEGACY_MEMORY_INSIGHTS_PATH, LEGACY_MEMORY_AUDIT_PATH, LEGACY_MEMORY_AUDIT_STATE_PATH, DEFAULT_INSIGHT_SCHEDULE, DEFAULT_MIN_INTERVAL_MS, MIN_INSIGHT_GROWTH_CHARS, INSIGHT_EXTRACTION_SCHEDULE_NAME, REQUIRED_MEMORY_SECTIONS;
41491
41603
  var init_memory_insights = __esm({
41492
41604
  "../core/src/memory-insights.ts"() {
41493
41605
  "use strict";
41494
41606
  MEMORY_WORKING_PATH = ".fusion/memory/MEMORY.md";
41495
- MEMORY_INSIGHTS_PATH = ".fusion/memory-insights.md";
41496
- MEMORY_AUDIT_PATH = ".fusion/memory-audit.md";
41497
- MEMORY_AUDIT_STATE_PATH = ".fusion/memory-audit-state.json";
41607
+ MEMORY_INSIGHTS_PATH = ".fusion/memory/memory-insights.md";
41608
+ MEMORY_AUDIT_PATH = ".fusion/memory/memory-audit.md";
41609
+ MEMORY_AUDIT_STATE_PATH = ".fusion/memory/memory-audit-state.json";
41610
+ LEGACY_MEMORY_INSIGHTS_PATH = ".fusion/memory-insights.md";
41611
+ LEGACY_MEMORY_AUDIT_PATH = ".fusion/memory-audit.md";
41612
+ LEGACY_MEMORY_AUDIT_STATE_PATH = ".fusion/memory-audit-state.json";
41498
41613
  DEFAULT_INSIGHT_SCHEDULE = "0 2 * * *";
41499
41614
  DEFAULT_MIN_INTERVAL_MS = 24 * 60 * 60 * 1e3;
41500
41615
  MIN_INSIGHT_GROWTH_CHARS = 1e3;
@@ -51905,12 +52020,12 @@ function resolveExtractionRoot(tempDir) {
51905
52020
  return tempDir;
51906
52021
  }
51907
52022
  async function extractTarArchive(archivePath, outputDir) {
51908
- const [{ execFile: execFile7 }, { promisify: promisify14 }] = await Promise.all([
52023
+ const [{ execFile: execFile8 }, { promisify: promisify14 }] = await Promise.all([
51909
52024
  import("node:child_process"),
51910
52025
  import("node:util")
51911
52026
  ]);
51912
- const execFileAsync5 = promisify14(execFile7);
51913
- await execFileAsync5("tar", ["xzf", archivePath, "-C", outputDir]);
52027
+ const execFileAsync6 = promisify14(execFile8);
52028
+ await execFileAsync6("tar", ["xzf", archivePath, "-C", outputDir]);
51914
52029
  }
51915
52030
  async function parseCompanyArchive(archivePath) {
51916
52031
  const resolvedArchivePath = resolve9(archivePath);
@@ -54026,7 +54141,13 @@ var init_research_orchestrator = __esm({
54026
54141
  continue;
54027
54142
  }
54028
54143
  for (const source of result.data.slice(0, Math.max(0, config.maxSources - allSources.length))) {
54029
- const saved = this.store.addSource(runId, source);
54144
+ const saved = this.store.addSource(runId, {
54145
+ ...source,
54146
+ metadata: {
54147
+ ...source.metadata ?? {},
54148
+ providerType: provider.type
54149
+ }
54150
+ });
54030
54151
  allSources.push(saved);
54031
54152
  this.store.addEvent(runId, {
54032
54153
  type: "source_added",
@@ -54053,7 +54174,9 @@ var init_research_orchestrator = __esm({
54053
54174
  sourceId: source.id
54054
54175
  });
54055
54176
  this.stepStarted(runId, step);
54056
- const result = await this.stepRunner.runContentFetch(source.reference, provider?.config, signal);
54177
+ const sourceProvider = this.getSourceProviderType(source);
54178
+ const providerConfig = sourceProvider ? config.providers.find((p) => p.type === sourceProvider)?.config : provider?.config;
54179
+ const result = await this.stepRunner.runContentFetch(source.reference, sourceProvider, providerConfig, signal);
54057
54180
  if (!result.ok || !result.data) {
54058
54181
  this.stepFailed(runId, step.id, result.error?.message ?? "Failed to fetch source content", result.error);
54059
54182
  continue;
@@ -54252,6 +54375,10 @@ var init_research_orchestrator = __esm({
54252
54375
  throw signal.reason ?? new Error("Research run aborted");
54253
54376
  }
54254
54377
  }
54378
+ getSourceProviderType(source) {
54379
+ const providerType = source.metadata?.providerType;
54380
+ return typeof providerType === "string" && providerType.length > 0 ? providerType : void 0;
54381
+ }
54255
54382
  canWriteRunData(runId) {
54256
54383
  const run = this.store.getRun(runId);
54257
54384
  if (!run) return false;
@@ -56815,8 +56942,8 @@ var init_research_step_runner = __esm({
56815
56942
  return this.classifyError("source-query", error);
56816
56943
  }
56817
56944
  }
56818
- async runContentFetch(url, config = {}, signal) {
56819
- const provider = this.findFirstConfiguredProvider();
56945
+ async runContentFetch(url, providerType, config = {}, signal) {
56946
+ const provider = this.resolveContentProvider(providerType);
56820
56947
  if (!provider) {
56821
56948
  return this.unconfigured("no configured provider available for content fetch");
56822
56949
  }
@@ -56855,6 +56982,13 @@ var init_research_step_runner = __esm({
56855
56982
  }
56856
56983
  return void 0;
56857
56984
  }
56985
+ resolveContentProvider(providerType) {
56986
+ if (providerType) {
56987
+ const selected = this.providers.get(providerType);
56988
+ if (selected?.isConfigured()) return selected;
56989
+ }
56990
+ return this.findFirstConfiguredProvider();
56991
+ }
56858
56992
  classifyError(step, error) {
56859
56993
  if (error instanceof ResearchStepTimeoutError) {
56860
56994
  return { ok: false, error: { code: "timeout", message: error.message, retryable: true } };
@@ -57067,11 +57201,11 @@ async function refreshAgentMemoryQmdIndex(rootDir, agentMemory) {
57067
57201
  return;
57068
57202
  }
57069
57203
  const promise = (async () => {
57070
- const { execFile: execFile7 } = await import("node:child_process");
57204
+ const { execFile: execFile8 } = await import("node:child_process");
57071
57205
  const { promisify: promisify14 } = await import("node:util");
57072
- const execFileAsync5 = promisify14(execFile7);
57206
+ const execFileAsync6 = promisify14(execFile8);
57073
57207
  try {
57074
- await execFileAsync5("qmd", buildQmdAgentMemoryCollectionAddArgs(rootDir, agentMemory.agentId), {
57208
+ await execFileAsync6("qmd", buildQmdAgentMemoryCollectionAddArgs(rootDir, agentMemory.agentId), {
57075
57209
  cwd: rootDir,
57076
57210
  timeout: 4e3,
57077
57211
  maxBuffer: 512 * 1024
@@ -57084,8 +57218,8 @@ ${stderr}`)) {
57084
57218
  throw error;
57085
57219
  }
57086
57220
  }
57087
- await execFileAsync5("qmd", ["update"], { cwd: rootDir, timeout: 3e4, maxBuffer: 1024 * 1024 });
57088
- await execFileAsync5("qmd", ["embed"], { cwd: rootDir, timeout: 12e4, maxBuffer: 1024 * 1024 });
57221
+ await execFileAsync6("qmd", ["update"], { cwd: rootDir, timeout: 3e4, maxBuffer: 1024 * 1024 });
57222
+ await execFileAsync6("qmd", ["embed"], { cwd: rootDir, timeout: 12e4, maxBuffer: 1024 * 1024 });
57089
57223
  })();
57090
57224
  agentQmdRefreshState.set(key, { lastStartedAt: now, inFlight: promise });
57091
57225
  try {
@@ -57133,10 +57267,10 @@ async function searchAgentMemoryWithQmd(rootDir, agentMemory, query, limit) {
57133
57267
  }
57134
57268
  try {
57135
57269
  await refreshAgentMemoryQmdIndex(rootDir, agentMemory);
57136
- const { execFile: execFile7 } = await import("node:child_process");
57270
+ const { execFile: execFile8 } = await import("node:child_process");
57137
57271
  const { promisify: promisify14 } = await import("node:util");
57138
- const execFileAsync5 = promisify14(execFile7);
57139
- const { stdout } = await execFileAsync5("qmd", buildQmdAgentMemorySearchArgs(rootDir, agentMemory.agentId, query, limit), {
57272
+ const execFileAsync6 = promisify14(execFile8);
57273
+ const { stdout } = await execFileAsync6("qmd", buildQmdAgentMemorySearchArgs(rootDir, agentMemory.agentId, query, limit), {
57140
57274
  cwd: rootDir,
57141
57275
  timeout: 4e3,
57142
57276
  maxBuffer: 1024 * 1024
@@ -57577,6 +57711,151 @@ ${lines.join("\n\n")}` }],
57577
57711
  }
57578
57712
  };
57579
57713
  }
57714
+ function createGetAgentConfigTool(agentStore, callingAgentId) {
57715
+ return {
57716
+ name: "fn_get_agent_config",
57717
+ label: "Get Agent Config",
57718
+ description: "Read full configuration for one of your direct-report agents.",
57719
+ parameters: getAgentConfigParams,
57720
+ execute: async (_id, params) => {
57721
+ const target = await agentStore.getAgent(params.agent_id);
57722
+ if (!target) {
57723
+ return {
57724
+ content: [{ type: "text", text: `ERROR: Agent ${params.agent_id} not found` }],
57725
+ details: {}
57726
+ };
57727
+ }
57728
+ if (target.reportsTo !== callingAgentId) {
57729
+ return {
57730
+ content: [{ type: "text", text: "ERROR: You can only read configuration of agents that report to you" }],
57731
+ details: {}
57732
+ };
57733
+ }
57734
+ const runtimeConfig = target.runtimeConfig ?? {};
57735
+ const lines = [
57736
+ `Agent Config: ${target.name} (${target.id})`,
57737
+ `Role: ${target.role}`,
57738
+ `State: ${target.state}`,
57739
+ `Title: ${target.title ?? "(none)"}`,
57740
+ `Icon: ${target.icon ?? "(none)"}`,
57741
+ "",
57742
+ "Soul:",
57743
+ target.soul ?? "(none)",
57744
+ "",
57745
+ "Instructions Text:",
57746
+ target.instructionsText ?? "(none)",
57747
+ `Instructions Path: ${target.instructionsPath ?? "(none)"}`,
57748
+ `Heartbeat Procedure Path: ${target.heartbeatProcedurePath ?? "(none)"}`,
57749
+ "",
57750
+ "Runtime Config:",
57751
+ `heartbeatIntervalMs: ${String(runtimeConfig.heartbeatIntervalMs ?? "(default)")}`,
57752
+ `heartbeatTimeoutMs: ${String(runtimeConfig.heartbeatTimeoutMs ?? "(default)")}`,
57753
+ `maxConcurrentRuns: ${String(runtimeConfig.maxConcurrentRuns ?? "(default)")}`,
57754
+ `messageResponseMode: ${String(runtimeConfig.messageResponseMode ?? "(default)")}`,
57755
+ `budget: ${JSON.stringify(runtimeConfig.budget ?? null)}`,
57756
+ "",
57757
+ "Memory:",
57758
+ target.memory ?? "(none)"
57759
+ ];
57760
+ return {
57761
+ content: [{ type: "text", text: lines.join("\n") }],
57762
+ details: { agent: target }
57763
+ };
57764
+ }
57765
+ };
57766
+ }
57767
+ function createUpdateAgentConfigTool(agentStore, callingAgentId) {
57768
+ return {
57769
+ name: "fn_update_agent_config",
57770
+ label: "Update Agent Config",
57771
+ description: "Update configuration for one of your direct-report agents.",
57772
+ parameters: updateAgentConfigParams,
57773
+ execute: async (_id, params) => {
57774
+ const target = await agentStore.getAgent(params.agent_id);
57775
+ if (!target) {
57776
+ return {
57777
+ content: [{ type: "text", text: `ERROR: Agent ${params.agent_id} not found` }],
57778
+ details: {}
57779
+ };
57780
+ }
57781
+ if (target.reportsTo !== callingAgentId) {
57782
+ return {
57783
+ content: [{ type: "text", text: "ERROR: You can only update configuration of agents that report to you" }],
57784
+ details: {}
57785
+ };
57786
+ }
57787
+ if (isEphemeralAgent(target)) {
57788
+ return {
57789
+ content: [{ type: "text", text: `ERROR: Cannot update ephemeral/runtime agent ${params.agent_id}` }],
57790
+ details: {}
57791
+ };
57792
+ }
57793
+ if (params.soul && params.soul.length > 1e4) {
57794
+ return {
57795
+ content: [{ type: "text", text: "ERROR: soul exceeds 10000 character limit" }],
57796
+ details: {}
57797
+ };
57798
+ }
57799
+ if (params.instructions_text && params.instructions_text.length > 5e4) {
57800
+ return {
57801
+ content: [{ type: "text", text: "ERROR: instructions_text exceeds 50000 character limit" }],
57802
+ details: {}
57803
+ };
57804
+ }
57805
+ if (params.instructions_path && params.instructions_path.length > 500) {
57806
+ return {
57807
+ content: [{ type: "text", text: "ERROR: instructions_path exceeds 500 character limit" }],
57808
+ details: {}
57809
+ };
57810
+ }
57811
+ if (params.heartbeat_procedure_path && params.heartbeat_procedure_path.length > 500) {
57812
+ return {
57813
+ content: [{ type: "text", text: "ERROR: heartbeat_procedure_path exceeds 500 character limit" }],
57814
+ details: {}
57815
+ };
57816
+ }
57817
+ const hasRuntimeConfigUpdates = [
57818
+ params.heartbeat_interval_ms,
57819
+ params.heartbeat_timeout_ms,
57820
+ params.max_concurrent_runs,
57821
+ params.message_response_mode
57822
+ ].some((value) => value !== void 0);
57823
+ const updateInput = {};
57824
+ if (params.soul !== void 0) updateInput.soul = params.soul;
57825
+ if (params.instructions_text !== void 0) updateInput.instructionsText = params.instructions_text;
57826
+ if (params.instructions_path !== void 0) updateInput.instructionsPath = params.instructions_path;
57827
+ if (params.heartbeat_procedure_path !== void 0) updateInput.heartbeatProcedurePath = params.heartbeat_procedure_path;
57828
+ if (hasRuntimeConfigUpdates) {
57829
+ updateInput.runtimeConfig = {
57830
+ ...target.runtimeConfig ?? {},
57831
+ ...params.heartbeat_interval_ms !== void 0 ? { heartbeatIntervalMs: params.heartbeat_interval_ms } : {},
57832
+ ...params.heartbeat_timeout_ms !== void 0 ? { heartbeatTimeoutMs: params.heartbeat_timeout_ms } : {},
57833
+ ...params.max_concurrent_runs !== void 0 ? { maxConcurrentRuns: params.max_concurrent_runs } : {},
57834
+ ...params.message_response_mode !== void 0 ? { messageResponseMode: params.message_response_mode } : {}
57835
+ };
57836
+ }
57837
+ if (Object.keys(updateInput).length === 0) {
57838
+ return {
57839
+ content: [{ type: "text", text: "ERROR: Provide at least one field to update" }],
57840
+ details: {}
57841
+ };
57842
+ }
57843
+ const updated = await agentStore.updateAgent(params.agent_id, updateInput);
57844
+ const updatedRuntimeConfig = updated.runtimeConfig ?? {};
57845
+ return {
57846
+ content: [{
57847
+ type: "text",
57848
+ text: `Updated ${updated.name} (${updated.id})
57849
+ heartbeatIntervalMs: ${String(updatedRuntimeConfig.heartbeatIntervalMs ?? "(default)")}
57850
+ heartbeatTimeoutMs: ${String(updatedRuntimeConfig.heartbeatTimeoutMs ?? "(default)")}
57851
+ maxConcurrentRuns: ${String(updatedRuntimeConfig.maxConcurrentRuns ?? "(default)")}
57852
+ messageResponseMode: ${String(updatedRuntimeConfig.messageResponseMode ?? "(default)")}`
57853
+ }],
57854
+ details: { agent: updated }
57855
+ };
57856
+ }
57857
+ };
57858
+ }
57580
57859
  function createDelegateTaskTool(agentStore, taskStore, options) {
57581
57860
  return {
57582
57861
  name: "fn_delegate_task",
@@ -57906,7 +58185,7 @@ ${lines.join("\n")}`
57906
58185
  }
57907
58186
  };
57908
58187
  }
57909
- var taskCreateParams, taskLogParams, taskDocumentWriteParams, taskDocumentReadParams, reflectOnPerformanceParams, listAgentsParams, delegateTaskParams, sendMessageParams, readMessagesParams, memorySearchParams, memoryGetParams, researchRunParams, researchListParams, researchGetParams, researchCancelParams, memoryAppendParams, log11, AGENT_MEMORY_ROOT2, AGENT_MEMORY_FILENAME2, AGENT_DREAMS_FILENAME2, agentQmdRefreshState, AGENT_QMD_REFRESH_INTERVAL_MS, DAILY_AGENT_MEMORY_RE2;
58188
+ var taskCreateParams, taskLogParams, taskDocumentWriteParams, taskDocumentReadParams, reflectOnPerformanceParams, listAgentsParams, delegateTaskParams, getAgentConfigParams, updateAgentConfigParams, sendMessageParams, readMessagesParams, memorySearchParams, memoryGetParams, researchRunParams, researchListParams, researchGetParams, researchCancelParams, memoryAppendParams, log11, AGENT_MEMORY_ROOT2, AGENT_MEMORY_FILENAME2, AGENT_DREAMS_FILENAME2, agentQmdRefreshState, AGENT_QMD_REFRESH_INTERVAL_MS, DAILY_AGENT_MEMORY_RE2;
57910
58189
  var init_agent_tools = __esm({
57911
58190
  "../engine/src/agent-tools.ts"() {
57912
58191
  "use strict";
@@ -57960,6 +58239,23 @@ var init_agent_tools = __esm({
57960
58239
  Type.Array(Type.String(), { description: 'Task IDs this new task depends on (e.g. ["KB-001"])' })
57961
58240
  )
57962
58241
  });
58242
+ getAgentConfigParams = Type.Object({
58243
+ agent_id: Type.String({ description: "The agent ID to read configuration for" })
58244
+ });
58245
+ updateAgentConfigParams = Type.Object({
58246
+ agent_id: Type.String({ description: "The agent ID to update" }),
58247
+ soul: Type.Optional(Type.String({ description: "Agent personality/identity text", maxLength: 1e4 })),
58248
+ instructions_text: Type.Optional(Type.String({ description: "Inline custom instructions", maxLength: 5e4 })),
58249
+ instructions_path: Type.Optional(Type.String({ description: "Path to instructions markdown file", maxLength: 500 })),
58250
+ heartbeat_procedure_path: Type.Optional(Type.String({ description: "Path to heartbeat procedure markdown file", maxLength: 500 })),
58251
+ heartbeat_interval_ms: Type.Optional(Type.Number({ description: "Heartbeat polling interval in ms", minimum: 1e3 })),
58252
+ heartbeat_timeout_ms: Type.Optional(Type.Number({ description: "Heartbeat timeout in ms", minimum: 5e3 })),
58253
+ max_concurrent_runs: Type.Optional(Type.Number({ description: "Max concurrent heartbeat runs", minimum: 1 })),
58254
+ message_response_mode: Type.Optional(Type.Union([
58255
+ Type.Literal("immediate"),
58256
+ Type.Literal("on-heartbeat")
58257
+ ], { description: "How agent responds to messages" }))
58258
+ });
57963
58259
  sendMessageParams = Type.Object({
57964
58260
  to_id: Type.String({ description: "Recipient ID (agent ID or user ID, depending on message type)" }),
57965
58261
  content: Type.String({ description: "Message body (1-2000 characters)" }),
@@ -62095,7 +62391,8 @@ async function execWithProcessGroup(command, options) {
62095
62391
  cwd: options.cwd,
62096
62392
  shell: true,
62097
62393
  detached: useProcessGroup,
62098
- stdio: ["ignore", "pipe", "pipe"]
62394
+ stdio: ["ignore", "pipe", "pipe"],
62395
+ ...options.env !== void 0 && { env: { ...process.env, ...options.env } }
62099
62396
  });
62100
62397
  let stdout = "";
62101
62398
  let stderr = "";
@@ -62297,7 +62594,7 @@ ${footer}`;
62297
62594
  return parts.join("\n") + `
62298
62595
  ${footer}`;
62299
62596
  }
62300
- async function runVerificationCommand(store, rootDir, taskId, command, type, signal, log18, agentLabel) {
62597
+ async function runVerificationCommand(store, rootDir, taskId, command, type, signal, log18, agentLabel, extraEnv) {
62301
62598
  const logger2 = log18 ?? { log: console.log, error: console.error, warn: console.warn };
62302
62599
  const label = agentLabel ?? "merger";
62303
62600
  if (signal?.aborted) {
@@ -62322,7 +62619,8 @@ async function runVerificationCommand(store, rootDir, taskId, command, type, sig
62322
62619
  cwd: rootDir,
62323
62620
  timeout: VERIFICATION_COMMAND_TIMEOUT_MS,
62324
62621
  maxBuffer: VERIFICATION_COMMAND_MAX_BUFFER,
62325
- signal
62622
+ signal,
62623
+ ...extraEnv !== void 0 && { env: extraEnv }
62326
62624
  });
62327
62625
  if (signal?.aborted) {
62328
62626
  throw Object.assign(
@@ -62567,7 +62865,7 @@ var init_run_audit = __esm({
62567
62865
  });
62568
62866
 
62569
62867
  // ../engine/src/merger.ts
62570
- import { execSync, exec as exec3 } from "node:child_process";
62868
+ import { execSync, exec as exec3, execFile as execFile3 } from "node:child_process";
62571
62869
  import { promisify as promisify4 } from "node:util";
62572
62870
  import { existsSync as existsSync24 } from "node:fs";
62573
62871
  import { join as join30 } from "node:path";
@@ -62699,11 +62997,51 @@ function throwIfAborted(signal, taskId) {
62699
62997
  if (!signal?.aborted) return;
62700
62998
  throw new MergeAbortedError(`Merge aborted for ${taskId}: engine shutdown requested`);
62701
62999
  }
63000
+ async function snapshotDirtyFiles(rootDir) {
63001
+ const paths = /* @__PURE__ */ new Set();
63002
+ try {
63003
+ const [unstagedOut, stagedOut, porcelainOut] = await Promise.all([
63004
+ execFileAsync("git", ["diff", "-z", "--name-only"], { cwd: rootDir, encoding: "utf-8" }).then(
63005
+ (r) => r.stdout,
63006
+ () => ""
63007
+ ),
63008
+ execFileAsync("git", ["diff", "-z", "--cached", "--name-only"], { cwd: rootDir, encoding: "utf-8" }).then(
63009
+ (r) => r.stdout,
63010
+ () => ""
63011
+ ),
63012
+ execFileAsync("git", ["status", "-z", "--porcelain"], { cwd: rootDir, encoding: "utf-8" }).then(
63013
+ (r) => r.stdout,
63014
+ () => ""
63015
+ )
63016
+ ]);
63017
+ for (const entry of unstagedOut.split("\0")) {
63018
+ const p = entry.trim();
63019
+ if (p) paths.add(p);
63020
+ }
63021
+ for (const entry of stagedOut.split("\0")) {
63022
+ const p = entry.trim();
63023
+ if (p) paths.add(p);
63024
+ }
63025
+ for (const entry of porcelainOut.split("\0")) {
63026
+ if (!entry.startsWith("?? ")) continue;
63027
+ const p = entry.slice(3);
63028
+ if (p) paths.add(p);
63029
+ }
63030
+ } catch {
63031
+ }
63032
+ return paths;
63033
+ }
62702
63034
  function rethrowIfMergeAborted(error) {
62703
63035
  if (error instanceof Error && error.name === "MergeAbortedError") {
62704
63036
  throw error;
62705
63037
  }
62706
63038
  }
63039
+ function execSyncText(command, options) {
63040
+ const output = execSync(command, options);
63041
+ if (output == null) return "";
63042
+ if (typeof output === "string") return output.trim();
63043
+ return output.toString("utf-8").trim();
63044
+ }
62707
63045
  async function runDeterministicVerification(store, rootDir, taskId, testCommand, buildCommand2, testSource, buildSource, signal) {
62708
63046
  const result = { allPassed: true };
62709
63047
  if (!testCommand && !buildCommand2) {
@@ -62714,6 +63052,35 @@ async function runDeterministicVerification(store, rootDir, taskId, testCommand,
62714
63052
  const normalizedBuildCommand = buildCommand2?.trim();
62715
63053
  const hasTestCommand = !!normalizedTestCommand;
62716
63054
  const hasBuildCommand = !!normalizedBuildCommand;
63055
+ const effectiveTestCommand = normalizedTestCommand ?? "";
63056
+ const effectiveBuildCommand = normalizedBuildCommand ?? "";
63057
+ let treeSha = null;
63058
+ try {
63059
+ treeSha = execSync("git rev-parse HEAD^{tree}", { cwd: rootDir, stdio: "pipe" }).toString().trim();
63060
+ } catch (err) {
63061
+ mergerLog.warn(`${taskId}: could not resolve tree sha \u2014 skipping verification cache: ${String(err)}`);
63062
+ }
63063
+ if (treeSha) {
63064
+ const cacheHit = store.getVerificationCacheHit(treeSha, effectiveTestCommand, effectiveBuildCommand);
63065
+ if (cacheHit) {
63066
+ const sha7 = treeSha.slice(0, 7);
63067
+ const msg = `Skipping deterministic verification \u2014 cached pass for tree ${sha7} (recorded at ${cacheHit.recordedAt}, by ${cacheHit.taskId ?? "unknown"})`;
63068
+ mergerLog.log(`${taskId}: ${msg}`);
63069
+ await store.logEntry(taskId, msg);
63070
+ await store.appendAgentLog(taskId, msg, "text", void 0, "merger");
63071
+ const syntheticResult = {
63072
+ command: "",
63073
+ exitCode: 0,
63074
+ stdout: "",
63075
+ stderr: "",
63076
+ success: true,
63077
+ cached: true
63078
+ };
63079
+ if (hasTestCommand) result.testResult = { ...syntheticResult, command: effectiveTestCommand };
63080
+ if (hasBuildCommand) result.buildResult = { ...syntheticResult, command: effectiveBuildCommand };
63081
+ return result;
63082
+ }
63083
+ }
62717
63084
  const testSourceLabel = testSource === "inferred" ? " [inferred]" : "";
62718
63085
  const buildSourceLabel = buildSource === "inferred" ? " [inferred]" : "";
62719
63086
  mergerLog.log(
@@ -62787,13 +63154,23 @@ async function runDeterministicVerification(store, rootDir, taskId, testCommand,
62787
63154
  mergerLog.log(`${taskId}: deterministic verification passed`);
62788
63155
  await store.logEntry(taskId, "Deterministic merge verification passed");
62789
63156
  await store.appendAgentLog(taskId, "Deterministic merge verification passed", "text", void 0, "merger");
63157
+ if (treeSha) {
63158
+ try {
63159
+ store.recordVerificationCachePass(treeSha, effectiveTestCommand, effectiveBuildCommand, taskId);
63160
+ mergerLog.log(`${taskId}: Recorded verification pass for tree ${treeSha.slice(0, 7)}`);
63161
+ await store.logEntry(taskId, `Recorded verification pass for tree ${treeSha.slice(0, 7)}`);
63162
+ } catch (err) {
63163
+ mergerLog.warn(`${taskId}: could not record verification cache pass: ${String(err)}`);
63164
+ }
63165
+ }
62790
63166
  return result;
62791
63167
  }
62792
63168
  async function runVerificationCommand2(store, rootDir, taskId, command, type, signal) {
62793
63169
  throwIfAborted(signal, taskId);
62794
- return runVerificationCommand(store, rootDir, taskId, command, type, signal, mergerLog, "merger");
63170
+ return runVerificationCommand(store, rootDir, taskId, command, type, signal, mergerLog, "merger", VERIFICATION_EXTRA_ENV);
62795
63171
  }
62796
- async function attemptInMergeVerificationFix(store, rootDir, taskId, failureContext, settings, options, mergeRunContext, fixAttemptNumber, _testCommand, _buildCommand) {
63172
+ async function attemptInMergeVerificationFix(store, rootDir, taskId, failureContext, settings, options, mergeRunContext, fixAttemptNumber, _testCommand, _buildCommand, fixModifiedFiles) {
63173
+ const preFixSnapshot = await snapshotDirtyFiles(rootDir);
62797
63174
  try {
62798
63175
  mergerLog.log(`${taskId}: spawning in-merge verification fix agent`);
62799
63176
  const logger2 = new AgentLogger({
@@ -62909,6 +63286,14 @@ ${failureContext.output.slice(0, VERIFICATION_LOG_MAX_CHARS)}
62909
63286
  signal: options.signal
62910
63287
  });
62911
63288
  await accumulateSessionTokenUsage(store, taskId, session);
63289
+ const postFixSnapshot = await snapshotDirtyFiles(rootDir);
63290
+ if (fixModifiedFiles) {
63291
+ for (const p of postFixSnapshot) {
63292
+ if (!preFixSnapshot.has(p)) {
63293
+ fixModifiedFiles.add(p);
63294
+ }
63295
+ }
63296
+ }
62912
63297
  await store.logEntry(
62913
63298
  taskId,
62914
63299
  `Re-running deterministic merge verification (attempt ${fixAttemptNumber ?? "unknown"})`
@@ -62935,6 +63320,17 @@ ${failureContext.output.slice(0, VERIFICATION_LOG_MAX_CHARS)}
62935
63320
  }
62936
63321
  } catch (err) {
62937
63322
  rethrowIfMergeAborted(err);
63323
+ if (fixModifiedFiles) {
63324
+ try {
63325
+ const postFixSnapshot = await snapshotDirtyFiles(rootDir);
63326
+ for (const p of postFixSnapshot) {
63327
+ if (!preFixSnapshot.has(p)) {
63328
+ fixModifiedFiles.add(p);
63329
+ }
63330
+ }
63331
+ } catch {
63332
+ }
63333
+ }
62938
63334
  const errorMessage = err instanceof Error ? err.message : String(err);
62939
63335
  mergerLog.warn(`${taskId}: in-merge fix agent error: ${errorMessage}`);
62940
63336
  await store.logEntry(taskId, "In-merge verification fix agent encountered an error", errorMessage);
@@ -63015,15 +63411,58 @@ ${trimmedDiffStat}` : ""
63015
63411
  bodyArg: `-m "${escape(body)}"`
63016
63412
  };
63017
63413
  }
63018
- async function commitOrAmendMergeWithFixes(rootDir, taskId, branch, commitLog, includeTaskId, preAttemptHeadSha, authorArg, diffStat, settings, signal, aiSummary, aiSubject) {
63414
+ async function commitOrAmendMergeWithFixes(rootDir, taskId, branch, commitLog, includeTaskId, preAttemptHeadSha, authorArg, diffStat, settings, signal, aiSummary, aiSubject, fixModifiedFiles = /* @__PURE__ */ new Set()) {
63019
63415
  try {
63020
- const { stdout: unstagedFiles } = await execAsync2("git diff --name-only", {
63416
+ const { stdout: squashStagedOut } = await execAsync2("git diff --cached --name-only", {
63021
63417
  cwd: rootDir,
63022
63418
  encoding: "utf-8"
63023
63419
  });
63024
- if (unstagedFiles.trim().length > 0) {
63025
- await execAsync2("git add -A", { cwd: rootDir });
63420
+ const squashStaged = new Set(squashStagedOut.split("\n").map((l) => l.trim()).filter(Boolean));
63421
+ const { stdout: unstagedOut } = await execAsync2("git diff --name-only", {
63422
+ cwd: rootDir,
63423
+ encoding: "utf-8"
63424
+ });
63425
+ const unstaged = new Set(unstagedOut.split("\n").map((l) => l.trim()).filter(Boolean));
63426
+ const { stdout: porcelainOut } = await execFileAsync("git", ["status", "-z", "--porcelain"], {
63427
+ cwd: rootDir,
63428
+ encoding: "utf-8"
63429
+ });
63430
+ const untracked = /* @__PURE__ */ new Set();
63431
+ for (const entry of porcelainOut.split("\0")) {
63432
+ if (!entry.startsWith("?? ")) continue;
63433
+ const p = entry.slice(3);
63434
+ if (p) untracked.add(p);
63435
+ }
63436
+ const unstagedToStage = [];
63437
+ for (const p of unstaged) {
63438
+ if (fixModifiedFiles.has(p)) {
63439
+ unstagedToStage.push(p);
63440
+ } else {
63441
+ mergerLog.warn(
63442
+ `${taskId}: refusing to stage unrelated working-tree change: ${p} (not part of squash or in-merge fix)`
63443
+ );
63444
+ }
63445
+ }
63446
+ if (unstagedToStage.length > 0) {
63447
+ await execFileAsync("git", ["add", "--", ...unstagedToStage], { cwd: rootDir });
63448
+ }
63449
+ const untrackedToStage = [];
63450
+ for (const p of untracked) {
63451
+ if (fixModifiedFiles.has(p)) {
63452
+ untrackedToStage.push(p);
63453
+ } else {
63454
+ mergerLog.warn(
63455
+ `${taskId}: refusing to stage unrelated working-tree change: ${p} (not part of squash or in-merge fix)`
63456
+ );
63457
+ }
63026
63458
  }
63459
+ if (untrackedToStage.length > 0) {
63460
+ await execFileAsync("git", ["add", "--", ...untrackedToStage], { cwd: rootDir });
63461
+ }
63462
+ const cap = (arr, n = 20) => arr.length <= n ? arr.join(", ") : `${arr.slice(0, n).join(", ")} ... (+${arr.length - n} more)`;
63463
+ mergerLog.log(
63464
+ `${taskId}: staging allowlist \u2014 squash: [${cap([...squashStaged])}], fixModified: [${cap([...fixModifiedFiles])}]`
63465
+ );
63027
63466
  const { stdout: staged } = await execAsync2("git diff --cached --raw", {
63028
63467
  cwd: rootDir,
63029
63468
  encoding: "utf-8"
@@ -63291,8 +63730,8 @@ async function classifyConflict(filePath, cwd) {
63291
63730
  }
63292
63731
  async function resolveWithOurs(filePath, cwd) {
63293
63732
  try {
63294
- await execAsync2(`git checkout --ours "${filePath}"`, { cwd });
63295
- await execAsync2(`git add "${filePath}"`, { cwd });
63733
+ await execFileAsync("git", ["checkout", "--ours", "--", filePath], { cwd });
63734
+ await execFileAsync("git", ["add", "--", filePath], { cwd });
63296
63735
  mergerLog.log(`Auto-resolved ${filePath} using --ours`);
63297
63736
  } catch (error) {
63298
63737
  throw new Error(`Failed to auto-resolve ${filePath} with ours: ${error}`);
@@ -63300,8 +63739,8 @@ async function resolveWithOurs(filePath, cwd) {
63300
63739
  }
63301
63740
  async function resolveWithTheirs(filePath, cwd) {
63302
63741
  try {
63303
- await execAsync2(`git checkout --theirs "${filePath}"`, { cwd });
63304
- await execAsync2(`git add "${filePath}"`, { cwd });
63742
+ await execFileAsync("git", ["checkout", "--theirs", "--", filePath], { cwd });
63743
+ await execFileAsync("git", ["add", "--", filePath], { cwd });
63305
63744
  mergerLog.log(`Auto-resolved ${filePath} using --theirs`);
63306
63745
  } catch (error) {
63307
63746
  throw new Error(`Failed to auto-resolve ${filePath} with theirs: ${error}`);
@@ -63309,7 +63748,7 @@ async function resolveWithTheirs(filePath, cwd) {
63309
63748
  }
63310
63749
  async function resolveTrivialWhitespace(filePath, cwd) {
63311
63750
  try {
63312
- await execAsync2(`git add "${filePath}"`, { cwd });
63751
+ await execFileAsync("git", ["add", "--", filePath], { cwd });
63313
63752
  mergerLog.log(`Auto-resolved ${filePath} (trivial whitespace)`);
63314
63753
  } catch (error) {
63315
63754
  throw new Error(`Failed to auto-resolve ${filePath} trivial conflict: ${error}`);
@@ -63617,7 +64056,7 @@ function parsePushRemoteTarget(rootDir, pushRemote) {
63617
64056
  const remote = remoteToken || "origin";
63618
64057
  let branch = branchTokens.join(" ").trim();
63619
64058
  if (!branch) {
63620
- branch = execSync("git symbolic-ref --short HEAD", {
64059
+ branch = execSyncText("git symbolic-ref --short HEAD", {
63621
64060
  cwd: rootDir,
63622
64061
  encoding: "utf-8",
63623
64062
  stdio: "pipe"
@@ -63925,7 +64364,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
63925
64364
  } catch {
63926
64365
  result.error = `Branch '${branch}' not found \u2014 moving to done without merge`;
63927
64366
  try {
63928
- const commitSha = execSync("git rev-parse HEAD", {
64367
+ const commitSha = execSyncText("git rev-parse HEAD", {
63929
64368
  cwd: rootDir,
63930
64369
  stdio: "pipe",
63931
64370
  encoding: "utf-8"
@@ -63948,12 +64387,12 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
63948
64387
  }
63949
64388
  try {
63950
64389
  throwIfAborted(options.signal, taskId);
63951
- const currentBranch = execSync("git symbolic-ref --short HEAD", {
64390
+ const currentBranch = execSyncText("git symbolic-ref --short HEAD", {
63952
64391
  cwd: rootDir,
63953
64392
  encoding: "utf-8",
63954
64393
  stdio: "pipe"
63955
64394
  }).trim();
63956
- const mainBranch = execSync("git rev-parse --abbrev-ref origin/HEAD", {
64395
+ const mainBranch = execSyncText("git rev-parse --abbrev-ref origin/HEAD", {
63957
64396
  cwd: rootDir,
63958
64397
  encoding: "utf-8",
63959
64398
  stdio: "pipe"
@@ -64442,6 +64881,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
64442
64881
  const failedType = verificationErr.verificationResult.testResult?.success === false ? "test" : "build";
64443
64882
  if (failedResult) {
64444
64883
  let fixSuccess = false;
64884
+ const verificationFixModifiedFiles = /* @__PURE__ */ new Set();
64445
64885
  for (let fixAttempt = 1; fixAttempt <= maxFixRetries; fixAttempt++) {
64446
64886
  const fixAttemptStartedAt = Date.now();
64447
64887
  mergerLog.log(`${taskId}: in-merge verification fix attempt ${fixAttempt}/${maxFixRetries}`);
@@ -64469,7 +64909,8 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
64469
64909
  { runId: mergeRunId, agentId: engineRunContext.agentId },
64470
64910
  fixAttempt,
64471
64911
  effectiveTestCommand,
64472
- effectiveBuildCommand
64912
+ effectiveBuildCommand,
64913
+ verificationFixModifiedFiles
64473
64914
  );
64474
64915
  const fixAttemptDurationMs = Date.now() - fixAttemptStartedAt;
64475
64916
  if (fixSuccess) {
@@ -64508,7 +64949,8 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
64508
64949
  settings,
64509
64950
  options.signal,
64510
64951
  aiMergeSummary,
64511
- aiMergeSubject
64952
+ aiMergeSubject,
64953
+ verificationFixModifiedFiles
64512
64954
  );
64513
64955
  if (!finalized) {
64514
64956
  resetMergeWithWarn(rootDir, taskId, "verification-fix finalize");
@@ -64539,6 +64981,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
64539
64981
  const fixCommand = effectiveBuildCommand || effectiveTestCommand;
64540
64982
  const fixType = effectiveBuildCommand ? "build" : "test";
64541
64983
  let fixSuccess = false;
64984
+ const buildFixModifiedFiles = /* @__PURE__ */ new Set();
64542
64985
  for (let fixAttempt = 1; fixAttempt <= maxFixRetries; fixAttempt++) {
64543
64986
  const fixAttemptStartedAt = Date.now();
64544
64987
  mergerLog.log(`${taskId}: in-merge verification fix attempt ${fixAttempt}/${maxFixRetries}`);
@@ -64566,7 +65009,8 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
64566
65009
  { runId: mergeRunId, agentId: engineRunContext.agentId },
64567
65010
  fixAttempt,
64568
65011
  effectiveTestCommand,
64569
- effectiveBuildCommand
65012
+ effectiveBuildCommand,
65013
+ buildFixModifiedFiles
64570
65014
  );
64571
65015
  const fixAttemptDurationMs = Date.now() - fixAttemptStartedAt;
64572
65016
  if (fixSuccess) {
@@ -64604,7 +65048,8 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
64604
65048
  settings,
64605
65049
  options.signal,
64606
65050
  aiMergeSummary,
64607
- aiMergeSubject
65051
+ aiMergeSubject,
65052
+ buildFixModifiedFiles
64608
65053
  );
64609
65054
  if (!finalized) {
64610
65055
  resetMergeWithWarn(rootDir, taskId, "build-verification fix finalize");
@@ -64685,7 +65130,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
64685
65130
  throw new Error(`AI merge failed for ${taskId}: all 3 attempts exhausted`);
64686
65131
  }
64687
65132
  try {
64688
- const commitSha = execSync("git rev-parse HEAD", {
65133
+ const commitSha = execSyncText("git rev-parse HEAD", {
64689
65134
  cwd: rootDir,
64690
65135
  stdio: "pipe",
64691
65136
  encoding: "utf-8"
@@ -64901,7 +65346,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
64901
65346
  async function tryFastForwardFromOrigin(rootDir, taskId) {
64902
65347
  let currentBranch;
64903
65348
  try {
64904
- currentBranch = execSync("git rev-parse --abbrev-ref HEAD", {
65349
+ currentBranch = execSyncText("git rev-parse --abbrev-ref HEAD", {
64905
65350
  cwd: rootDir,
64906
65351
  encoding: "utf-8",
64907
65352
  stdio: "pipe"
@@ -64919,7 +65364,7 @@ async function tryFastForwardFromOrigin(rootDir, taskId) {
64919
65364
  let behind = 0;
64920
65365
  let ahead = 0;
64921
65366
  try {
64922
- const counts = execSync(`git rev-list --left-right --count "origin/${currentBranch}...HEAD"`, {
65367
+ const counts = execSyncText(`git rev-list --left-right --count "origin/${currentBranch}...HEAD"`, {
64923
65368
  cwd: rootDir,
64924
65369
  encoding: "utf-8",
64925
65370
  stdio: "pipe"
@@ -65058,7 +65503,7 @@ async function executeMergeAttempt(params, aiTracker) {
65058
65503
  }
65059
65504
  }
65060
65505
  if (complex.length === 0) {
65061
- const staged = execSync("git diff --cached --quiet 2>&1; echo $?", {
65506
+ const staged = execSyncText("git diff --cached --quiet 2>&1; echo $?", {
65062
65507
  cwd: rootDir,
65063
65508
  encoding: "utf-8"
65064
65509
  }).trim();
@@ -65159,7 +65604,7 @@ async function executeMergeAttempt(params, aiTracker) {
65159
65604
  }
65160
65605
  return true;
65161
65606
  }
65162
- const conflictedOutput = execSync("git diff --name-only --diff-filter=U", {
65607
+ const conflictedOutput = execSyncText("git diff --name-only --diff-filter=U", {
65163
65608
  cwd: rootDir,
65164
65609
  encoding: "utf-8"
65165
65610
  }).trim();
@@ -65280,7 +65725,7 @@ async function attemptWithSideStrategy(params, side = "theirs", aiTracker) {
65280
65725
  await execAsync2(`git merge -X ${side} --squash "${branch}"`, {
65281
65726
  cwd: rootDir
65282
65727
  });
65283
- const conflictedOutput = execSync("git diff --name-only --diff-filter=U", {
65728
+ const conflictedOutput = execSyncText("git diff --name-only --diff-filter=U", {
65284
65729
  cwd: rootDir,
65285
65730
  encoding: "utf-8"
65286
65731
  }).trim();
@@ -65288,7 +65733,7 @@ async function attemptWithSideStrategy(params, side = "theirs", aiTracker) {
65288
65733
  mergerLog.warn(`${taskId}: -X ${side} left unresolved conflicts: ${conflictedOutput}`);
65289
65734
  return false;
65290
65735
  }
65291
- const staged = execSync("git diff --cached --quiet 2>&1; echo $?", {
65736
+ const staged = execSyncText("git diff --cached --quiet 2>&1; echo $?", {
65292
65737
  cwd: rootDir,
65293
65738
  encoding: "utf-8"
65294
65739
  }).trim();
@@ -65550,7 +65995,7 @@ async function runAiAgentForCommit(params) {
65550
65995
  mergerLog.error(`Build verification failed for ${taskId}: ${buildErrorMessage}`);
65551
65996
  return { success: false, error: buildErrorMessage };
65552
65997
  }
65553
- const staged = execSync("git diff --cached --quiet 2>&1; echo $?", {
65998
+ const staged = execSyncText("git diff --cached --quiet 2>&1; echo $?", {
65554
65999
  cwd: rootDir,
65555
66000
  encoding: "utf-8"
65556
66001
  }).trim();
@@ -65945,7 +66390,7 @@ async function completeTask(store, taskId, result) {
65945
66390
  result.task = task;
65946
66391
  store.emit("task:merged", result);
65947
66392
  }
65948
- var execAsync2, LOCKFILE_PATTERNS, GENERATED_PATTERNS, DEPENDENCY_SYNC_TRIGGER_PATTERNS, WORKFLOW_SCRIPT_OUTPUT_MAX_CHARS, PULL_REBASE_TIMEOUT_MS, PUSH_TIMEOUT_MS, MERGE_COMMIT_LOG_MAX_CHARS, MERGE_DIFF_STAT_MAX_CHARS, VerificationError, MergeAbortedError, FUSION_TASK_ID_TRAILER_KEY;
66393
+ var execAsync2, execFileAsync, LOCKFILE_PATTERNS, GENERATED_PATTERNS, DEPENDENCY_SYNC_TRIGGER_PATTERNS, WORKFLOW_SCRIPT_OUTPUT_MAX_CHARS, PULL_REBASE_TIMEOUT_MS, PUSH_TIMEOUT_MS, MERGE_COMMIT_LOG_MAX_CHARS, MERGE_DIFF_STAT_MAX_CHARS, VerificationError, MergeAbortedError, VERIFICATION_EXTRA_ENV, FUSION_TASK_ID_TRAILER_KEY;
65949
66394
  var init_merger = __esm({
65950
66395
  "../engine/src/merger.ts"() {
65951
66396
  "use strict";
@@ -65965,6 +66410,7 @@ var init_merger = __esm({
65965
66410
  init_agent_instructions();
65966
66411
  init_run_audit();
65967
66412
  execAsync2 = promisify4(exec3);
66413
+ execFileAsync = promisify4(execFile3);
65968
66414
  LOCKFILE_PATTERNS = [
65969
66415
  "package-lock.json",
65970
66416
  "pnpm-lock.yaml",
@@ -66019,6 +66465,13 @@ var init_merger = __esm({
66019
66465
  this.name = "MergeAbortedError";
66020
66466
  }
66021
66467
  };
66468
+ VERIFICATION_EXTRA_ENV = Object.fromEntries(
66469
+ [
66470
+ ["FUSION_TEST_TOTAL_WORKERS", "8"],
66471
+ ["FUSION_TEST_CONCURRENCY", "4"],
66472
+ ["FUSION_TEST_WORKSPACE_CONCURRENCY", "4"]
66473
+ ].filter(([key]) => !(key in process.env))
66474
+ );
66022
66475
  FUSION_TASK_ID_TRAILER_KEY = "Fusion-Task-Id";
66023
66476
  }
66024
66477
  });
@@ -70146,7 +70599,11 @@ ${summary}`,
70146
70599
  // Agent delegation tools — discover and delegate work to other agents.
70147
70600
  ...this.options.agentStore ? [
70148
70601
  createListAgentsTool(this.options.agentStore),
70149
- createDelegateTaskTool(this.options.agentStore, this.store, { rootDir: this.rootDir })
70602
+ createDelegateTaskTool(this.options.agentStore, this.store, { rootDir: this.rootDir }),
70603
+ ...assignedAgentId ? [
70604
+ createGetAgentConfigTool(this.options.agentStore, assignedAgentId),
70605
+ createUpdateAgentConfigTool(this.options.agentStore, assignedAgentId)
70606
+ ] : []
70150
70607
  ] : [],
70151
70608
  // Messaging tools — allows executor agents to send and receive messages.
70152
70609
  ...this.options.messageStore && assignedAgentId ? [
@@ -76152,6 +76609,7 @@ Your job:
76152
76609
  2. Do ONE useful action that changes project clarity or flow.
76153
76610
  3. Use fn_task_create to spawn follow-up work, fn_task_log to record observations, and fn_task_document_write for durable artifacts.
76154
76611
  4. Use fn_list_agents + fn_delegate_task when work should be assigned to a specific capable agent now.
76612
+ 5. Use fn_get_agent_config and fn_update_agent_config to tune direct reports before delegating recurring work.
76155
76613
  5. Call fn_heartbeat_done when finished with an optional summary of what was accomplished.
76156
76614
 
76157
76615
  Examples of ONE useful action:
@@ -76175,6 +76633,7 @@ Use this decision rule:
76175
76633
  - **Task document (fn_task_document_write):** when findings are structured and likely useful across future sessions for the same task.
76176
76634
  - **Create task (fn_task_create):** when someone must do new executable work.
76177
76635
  - **Delegate task (fn_delegate_task):** when that new work should go to a specific agent based on role/availability.
76636
+ - **Manage report config (fn_get_agent_config / fn_update_agent_config):** when direct reports need heartbeat, instruction, or personality tuning.
76178
76637
 
76179
76638
  Prefer fn_task_create when assignment is unclear and scheduler routing is fine.
76180
76639
  Prefer fn_delegate_task when immediate ownership by a specific agent materially reduces latency or risk.
@@ -76228,6 +76687,7 @@ Your job:
76228
76687
  2. Do ONE useful action: analyze, create follow-up tasks, delegate work, or update memory.
76229
76688
  3. Use fn_task_create to spawn follow-up work.
76230
76689
  4. Use fn_list_agents and fn_delegate_task to coordinate with other agents.
76690
+ 5. Use fn_get_agent_config and fn_update_agent_config to read/tune direct-report agents for better routing outcomes.
76231
76691
  5. Call fn_heartbeat_done when finished with an optional summary of what was accomplished.
76232
76692
 
76233
76693
  Examples of ONE useful action:
@@ -76241,6 +76701,7 @@ Keep work lightweight \u2014 this is a single-pass ambient check, not a full imp
76241
76701
  You have readonly file access plus:
76242
76702
  - fn_task_create
76243
76703
  - fn_list_agents and fn_delegate_task
76704
+ - fn_get_agent_config and fn_update_agent_config (for direct reports only)
76244
76705
  - fn_memory_search, fn_memory_get, and fn_memory_append
76245
76706
  - fn_heartbeat_done
76246
76707
  - fn_send_message and fn_read_messages when messaging is enabled for this run (they may not always be available)
@@ -77082,6 +77543,8 @@ not loop on the same plan across heartbeats without recording why.`;
77082
77543
  }, { rootDir: this.rootDir }));
77083
77544
  heartbeatTools.push(createListAgentsTool(this.store));
77084
77545
  heartbeatTools.push(createDelegateTaskTool(this.store, taskStore, { rootDir: this.rootDir }));
77546
+ heartbeatTools.push(createGetAgentConfigTool(this.store, agentId));
77547
+ heartbeatTools.push(createUpdateAgentConfigTool(this.store, agentId));
77085
77548
  if (this.messageStore) {
77086
77549
  heartbeatTools.push(createSendMessageTool(this.messageStore, agentId));
77087
77550
  heartbeatTools.push(createReadMessagesTool(this.messageStore, agentId));
@@ -77503,6 +77966,8 @@ ${taskDetail.prompt}` : "No PROMPT.md available.",
77503
77966
  tools.push(createTaskDocumentReadTool(taskStore, taskId));
77504
77967
  tools.push(createListAgentsTool(this.store));
77505
77968
  tools.push(createDelegateTaskTool(this.store, taskStore, { rootDir: this.rootDir }));
77969
+ tools.push(createGetAgentConfigTool(this.store, agentId));
77970
+ tools.push(createUpdateAgentConfigTool(this.store, agentId));
77506
77971
  if (messageStore) {
77507
77972
  tools.push(createSendMessageTool(messageStore, agentId));
77508
77973
  tools.push(createReadMessagesTool(messageStore, agentId));
@@ -82353,6 +82818,11 @@ var init_in_process_runtime = __esm({
82353
82818
  this.recordActivity();
82354
82819
  runtimeLog.log(`Started executing task ${task.id} in ${worktreePath}`);
82355
82820
  if (this.agentStore) {
82821
+ if (this.taskAgentMap.has(task.id)) {
82822
+ runtimeLog.warn(`Skipping task-worker creation for ${task.id}: agent already exists (${this.taskAgentMap.get(task.id)})`);
82823
+ return;
82824
+ }
82825
+ this.taskAgentMap.set(task.id, "creating");
82356
82826
  this.agentStore.createAgent({
82357
82827
  name: `executor-${task.id}`,
82358
82828
  role: "executor",
@@ -82370,6 +82840,7 @@ var init_in_process_runtime = __esm({
82370
82840
  await this.agentStore.updateAgentState(agent.id, "active");
82371
82841
  await this.agentStore.updateAgentState(agent.id, "running");
82372
82842
  }).catch((err) => {
82843
+ this.taskAgentMap.delete(task.id);
82373
82844
  runtimeLog.warn(`Failed to create agent for task ${task.id}:`, err);
82374
82845
  });
82375
82846
  }
@@ -84886,7 +85357,7 @@ var init_provider_adapters = __esm({
84886
85357
 
84887
85358
  // ../engine/src/remote-access/tunnel-process-manager.ts
84888
85359
  import { EventEmitter as EventEmitter23 } from "node:events";
84889
- import { exec as exec10, execFile as execFile3, spawn as spawn5 } from "node:child_process";
85360
+ import { exec as exec10, execFile as execFile4, spawn as spawn5 } from "node:child_process";
84890
85361
  import { promisify as promisify10 } from "node:util";
84891
85362
  function nowIso() {
84892
85363
  return (/* @__PURE__ */ new Date()).toISOString();
@@ -84927,7 +85398,7 @@ function toStateError(code, err) {
84927
85398
  at: nowIso()
84928
85399
  };
84929
85400
  }
84930
- var DEFAULT_MAX_LOG_ENTRIES, DEFAULT_STOP_TIMEOUT_MS2, execFileAsync, execAsync8, LineBuffer, TunnelProcessManager;
85401
+ var DEFAULT_MAX_LOG_ENTRIES, DEFAULT_STOP_TIMEOUT_MS2, execFileAsync2, execAsync8, LineBuffer, TunnelProcessManager;
84931
85402
  var init_tunnel_process_manager = __esm({
84932
85403
  "../engine/src/remote-access/tunnel-process-manager.ts"() {
84933
85404
  "use strict";
@@ -84935,7 +85406,7 @@ var init_tunnel_process_manager = __esm({
84935
85406
  init_provider_adapters();
84936
85407
  DEFAULT_MAX_LOG_ENTRIES = 400;
84937
85408
  DEFAULT_STOP_TIMEOUT_MS2 = 5e3;
84938
- execFileAsync = promisify10(execFile3);
85409
+ execFileAsync2 = promisify10(execFile4);
84939
85410
  execAsync8 = promisify10(exec10);
84940
85411
  LineBuffer = class {
84941
85412
  pending = "";
@@ -85013,7 +85484,7 @@ var init_tunnel_process_manager = __esm({
85013
85484
  return null;
85014
85485
  }
85015
85486
  try {
85016
- const { stdout } = await execFileAsync("tailscale", ["status", "--json"], { timeout: 3e3 });
85487
+ const { stdout } = await execFileAsync2("tailscale", ["status", "--json"], { timeout: 3e3 });
85017
85488
  const data = JSON.parse(String(stdout));
85018
85489
  const dnsName = data.Self?.DNSName?.replace(/\.$/, "");
85019
85490
  if (!dnsName) {
@@ -85036,7 +85507,7 @@ var init_tunnel_process_manager = __esm({
85036
85507
  ];
85037
85508
  for (const resetCommand of resetCommands) {
85038
85509
  try {
85039
- await execFileAsync(resetCommand.command, resetCommand.args, { timeout: 5e3 });
85510
+ await execFileAsync2(resetCommand.command, resetCommand.args, { timeout: 5e3 });
85040
85511
  return;
85041
85512
  } catch {
85042
85513
  }
@@ -85335,7 +85806,7 @@ var init_tunnel_process_manager = __esm({
85335
85806
  });
85336
85807
 
85337
85808
  // ../engine/src/project-engine.ts
85338
- import { execFile as execFile4 } from "node:child_process";
85809
+ import { execFile as execFile5 } from "node:child_process";
85339
85810
  import { promisify as promisify11 } from "node:util";
85340
85811
  function formatErrorDetails(error) {
85341
85812
  if (error instanceof Error) {
@@ -85347,7 +85818,7 @@ function formatErrorDetails(error) {
85347
85818
  const detail = String(error);
85348
85819
  return { message: detail, detail };
85349
85820
  }
85350
- var execFileAsync2, MERGE_HANDOFF_GRACE_MS, isRemoteActive, ProjectEngine;
85821
+ var execFileAsync3, MERGE_HANDOFF_GRACE_MS, isRemoteActive, ProjectEngine;
85351
85822
  var init_project_engine = __esm({
85352
85823
  "../engine/src/project-engine.ts"() {
85353
85824
  "use strict";
@@ -85365,7 +85836,7 @@ var init_project_engine = __esm({
85365
85836
  init_research_orchestrator();
85366
85837
  init_research_step_runner();
85367
85838
  init_tunnel_process_manager();
85368
- execFileAsync2 = promisify11(execFile4);
85839
+ execFileAsync3 = promisify11(execFile5);
85369
85840
  MERGE_HANDOFF_GRACE_MS = 300;
85370
85841
  isRemoteActive = (ra) => ra?.activeProvider != null && (ra.providers[ra.activeProvider]?.enabled ?? false);
85371
85842
  ProjectEngine = class _ProjectEngine {
@@ -86006,7 +86477,7 @@ ${detail}`
86006
86477
  async checkExecutableAvailable(command) {
86007
86478
  const checker = process.platform === "win32" ? "where" : "which";
86008
86479
  try {
86009
- await execFileAsync2(checker, [command]);
86480
+ await execFileAsync3(checker, [command]);
86010
86481
  return { available: true };
86011
86482
  } catch {
86012
86483
  return {
@@ -86097,7 +86568,11 @@ ${detail}`
86097
86568
  if (this.mergeActive.has(taskId)) return;
86098
86569
  this.mergeActive.add(taskId);
86099
86570
  this.mergeQueue.push(taskId);
86100
- void this.drainMergeQueue();
86571
+ void this.drainMergeQueue().catch((err) => {
86572
+ runtimeLog.error(
86573
+ `Merge queue drain failed unexpectedly: ${err instanceof Error ? err.message : String(err)}`
86574
+ );
86575
+ });
86101
86576
  }
86102
86577
  /**
86103
86578
  * Filter a sweep's listTasks() result to merge-eligible tasks, sort by
@@ -86619,15 +87094,19 @@ ${detail}`
86619
87094
  runtimeLog.warn(
86620
87095
  `Auto-merge periodic sweep failed: ${err instanceof Error ? err.message : String(err)}`
86621
87096
  );
86622
- }
86623
- if (!this.shuttingDown) {
86624
- const interval = await store.getSettings().then((s) => s.pollIntervalMs ?? 15e3).catch((err) => {
86625
- runtimeLog.warn(
86626
- `Auto-merge retry: failed to read pollIntervalMs, using default 15s: ${err instanceof Error ? err.message : String(err)}`
86627
- );
86628
- return 15e3;
86629
- });
86630
- this.mergeRetryTimer = setTimeout(() => void schedule(), interval);
87097
+ } finally {
87098
+ if (!this.shuttingDown) {
87099
+ let interval = 15e3;
87100
+ try {
87101
+ const settings = await store.getSettings();
87102
+ interval = settings.pollIntervalMs ?? 15e3;
87103
+ } catch (err) {
87104
+ runtimeLog.warn(
87105
+ `Auto-merge retry: failed to read pollIntervalMs, using default 15s: ${err instanceof Error ? err.message : String(err)}`
87106
+ );
87107
+ }
87108
+ this.mergeRetryTimer = setTimeout(() => void schedule(), interval);
87109
+ }
86631
87110
  }
86632
87111
  };
86633
87112
  this.mergeRetryTimer = setTimeout(() => void schedule(), 15e3);
@@ -89504,7 +89983,6 @@ function definePlugin(plugin4) {
89504
89983
  var init_src3 = __esm({
89505
89984
  "../plugin-sdk/src/index.ts"() {
89506
89985
  "use strict";
89507
- init_src();
89508
89986
  }
89509
89987
  });
89510
89988
 
@@ -96697,13 +97175,13 @@ var init_github_poll = __esm({
96697
97175
  });
96698
97176
 
96699
97177
  // ../dashboard/src/routes/resolve-diff-base.ts
96700
- import { execFile as execFile5 } from "node:child_process";
97178
+ import { execFile as execFile6 } from "node:child_process";
96701
97179
  import { promisify as promisify12 } from "node:util";
96702
- var execFileAsync3;
97180
+ var execFileAsync4;
96703
97181
  var init_resolve_diff_base = __esm({
96704
97182
  "../dashboard/src/routes/resolve-diff-base.ts"() {
96705
97183
  "use strict";
96706
- execFileAsync3 = promisify12(execFile5);
97184
+ execFileAsync4 = promisify12(execFile6);
96707
97185
  }
96708
97186
  });
96709
97187
 
@@ -97062,13 +97540,13 @@ var init_register_agents_projects_nodes = __esm({
97062
97540
  });
97063
97541
 
97064
97542
  // ../dashboard/src/exec-file.ts
97065
- import { execFile as execFile6 } from "node:child_process";
97543
+ import { execFile as execFile7 } from "node:child_process";
97066
97544
  import { promisify as promisify13 } from "node:util";
97067
- var execFileAsync4;
97545
+ var execFileAsync5;
97068
97546
  var init_exec_file = __esm({
97069
97547
  "../dashboard/src/exec-file.ts"() {
97070
97548
  "use strict";
97071
- execFileAsync4 = promisify13(execFile6);
97549
+ execFileAsync5 = promisify13(execFile7);
97072
97550
  }
97073
97551
  });
97074
97552
 
@@ -97337,9 +97815,9 @@ var init_claude_cli_probe = __esm({
97337
97815
  }
97338
97816
  });
97339
97817
 
97340
- // ../../plugins/fusion-plugin-droid-runtime/dist/probe.js
97818
+ // ../../plugins/fusion-plugin-droid-runtime/src/probe.ts
97341
97819
  var init_probe3 = __esm({
97342
- "../../plugins/fusion-plugin-droid-runtime/dist/probe.js"() {
97820
+ "../../plugins/fusion-plugin-droid-runtime/src/probe.ts"() {
97343
97821
  "use strict";
97344
97822
  }
97345
97823
  });
@@ -97352,6 +97830,13 @@ var init_droid_cli_probe = __esm({
97352
97830
  }
97353
97831
  });
97354
97832
 
97833
+ // ../dashboard/src/llama-cpp-probe.ts
97834
+ var init_llama_cpp_probe = __esm({
97835
+ "../dashboard/src/llama-cpp-probe.ts"() {
97836
+ "use strict";
97837
+ }
97838
+ });
97839
+
97355
97840
  // ../dashboard/src/routes/register-auth-routes.ts
97356
97841
  var init_register_auth_routes = __esm({
97357
97842
  "../dashboard/src/routes/register-auth-routes.ts"() {
@@ -97359,6 +97844,7 @@ var init_register_auth_routes = __esm({
97359
97844
  init_src();
97360
97845
  init_claude_cli_probe();
97361
97846
  init_droid_cli_probe();
97847
+ init_llama_cpp_probe();
97362
97848
  init_api_error();
97363
97849
  init_usage();
97364
97850
  init_project_store_resolver();