@runfusion/fusion 0.18.1 → 0.19.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin.js +1809 -689
- package/dist/client/assets/{ChatView-3Sqm6teN.js → ChatView-DEG93wpC.js} +1 -1
- package/dist/client/assets/{DevServerView-r6V3FqkY.js → DevServerView-DI71QIND.js} +1 -1
- package/dist/client/assets/{DirectoryPicker-CTZE95Fk.js → DirectoryPicker-6eBfMR3k.js} +1 -1
- package/dist/client/assets/{DocumentsView-DSEf1Lmg.js → DocumentsView-D9pxwmaa.js} +1 -1
- package/dist/client/assets/{InsightsView-F5PZsX5u.js → InsightsView-D2_XwizY.js} +1 -1
- package/dist/client/assets/{MemoryView-DicXjec9.js → MemoryView-DfjllRpZ.js} +1 -1
- package/dist/client/assets/{NodesView-DddCS7zB.js → NodesView-D7hWWUCW.js} +1 -1
- package/dist/client/assets/{PiExtensionsManager-Ch7si-v8.js → PiExtensionsManager-d8cJKjcL.js} +2 -2
- package/dist/client/assets/{PluginManager-LcTh_fHP.js → PluginManager-CNzhmPzJ.js} +1 -1
- package/dist/client/assets/{ResearchView-D0TY1VcX.js → ResearchView-2xAa3pzZ.js} +1 -1
- package/dist/client/assets/{RoadmapsView-DfEF3mql.js → RoadmapsView-ajwwf979.js} +1 -1
- package/dist/client/assets/SettingsModal-BWe0KrGY.css +1 -0
- package/dist/client/assets/{SettingsModal-YcScdFiG.js → SettingsModal-D732WMft.js} +1 -1
- package/dist/client/assets/SettingsModal-Dk0zKdTy.js +31 -0
- package/dist/client/assets/{SetupWizardModal-EDYuf9Yc.js → SetupWizardModal-DohGTvQT.js} +1 -1
- package/dist/client/assets/{SkillsView-Dkq2CQla.js → SkillsView-CzVO7yTO.js} +1 -1
- package/dist/client/assets/index-CVCt2pCH.css +1 -0
- package/dist/client/assets/index-hnO5QagU.js +1239 -0
- package/dist/client/assets/{users-Cp5TSxVm.js → users-R3_m9pE5.js} +1 -1
- package/dist/client/index.html +2 -2
- package/dist/client/version.json +1 -1
- package/dist/droid-cli/package.json +1 -1
- package/dist/droid-cli/src/__tests__/provider.test.ts +1 -1
- package/dist/extension.js +610 -114
- package/dist/pi-claude-cli/package.json +1 -1
- package/dist/plugins/fusion-plugin-dependency-graph/package.json +1 -1
- package/package.json +3 -2
- package/skill/fusion/references/engine-tools.md +2 -0
- package/dist/client/assets/SettingsModal-SOADcCNJ.js +0 -31
- package/dist/client/assets/SettingsModal-oOnIed5O.css +0 -1
- package/dist/client/assets/index-4hC8zoTD.css +0 -1
- package/dist/client/assets/index-DNzA4aZ7.js +0 -1229
package/dist/extension.js
CHANGED
|
@@ -91,6 +91,7 @@ var init_settings_schema = __esm({
|
|
|
91
91
|
modelOnboardingComplete: void 0,
|
|
92
92
|
useClaudeCli: void 0,
|
|
93
93
|
useDroidCli: void 0,
|
|
94
|
+
useLlamaCpp: void 0,
|
|
94
95
|
// Global baseline lanes for per-role model selection
|
|
95
96
|
executionGlobalProvider: void 0,
|
|
96
97
|
executionGlobalModelId: void 0,
|
|
@@ -2771,7 +2772,7 @@ var init_db = __esm({
|
|
|
2771
2772
|
"use strict";
|
|
2772
2773
|
init_sqlite_adapter();
|
|
2773
2774
|
init_types();
|
|
2774
|
-
SCHEMA_VERSION =
|
|
2775
|
+
SCHEMA_VERSION = 61;
|
|
2775
2776
|
SCHEMA_SQL = `
|
|
2776
2777
|
-- Tasks table with JSON columns for nested data
|
|
2777
2778
|
CREATE TABLE IF NOT EXISTS tasks (
|
|
@@ -4643,6 +4644,21 @@ This means a caller passed a .fusion directory where a project root was expected
|
|
|
4643
4644
|
this.db.exec(`CREATE INDEX IF NOT EXISTS idxTasksPausedByAgentId ON tasks(pausedByAgentId)`);
|
|
4644
4645
|
});
|
|
4645
4646
|
}
|
|
4647
|
+
if (version < 61) {
|
|
4648
|
+
this.applyMigration(61, () => {
|
|
4649
|
+
this.db.exec(`
|
|
4650
|
+
CREATE TABLE IF NOT EXISTS verification_cache (
|
|
4651
|
+
treeSha TEXT NOT NULL,
|
|
4652
|
+
testCommand TEXT NOT NULL DEFAULT '',
|
|
4653
|
+
buildCommand TEXT NOT NULL DEFAULT '',
|
|
4654
|
+
recordedAt TEXT NOT NULL,
|
|
4655
|
+
taskId TEXT,
|
|
4656
|
+
PRIMARY KEY (treeSha, testCommand, buildCommand)
|
|
4657
|
+
)
|
|
4658
|
+
`);
|
|
4659
|
+
this.db.exec(`CREATE INDEX IF NOT EXISTS idxVerificationCacheRecordedAt ON verification_cache(recordedAt)`);
|
|
4660
|
+
});
|
|
4661
|
+
}
|
|
4646
4662
|
}
|
|
4647
4663
|
/**
|
|
4648
4664
|
* Run a single migration step inside a transaction and bump the version.
|
|
@@ -5078,6 +5094,33 @@ var init_agent_store = __esm({
|
|
|
5078
5094
|
`).run(migrationKey, migrationVersion);
|
|
5079
5095
|
this.db.bumpLastModified();
|
|
5080
5096
|
}
|
|
5097
|
+
/**
|
|
5098
|
+
* Find the first non-ephemeral agent by exact name.
|
|
5099
|
+
*
|
|
5100
|
+
* Ephemeral task-worker/spawned agents are excluded so callers can use this
|
|
5101
|
+
* for durable identity checks without transient runtime workers conflicting.
|
|
5102
|
+
*
|
|
5103
|
+
* @param name - Agent name to match exactly
|
|
5104
|
+
* @returns Matching non-ephemeral agent, or null when none exists
|
|
5105
|
+
*/
|
|
5106
|
+
async findAgentByName(name) {
|
|
5107
|
+
const rows = this.db.prepare("SELECT * FROM agents WHERE name = ? ORDER BY createdAt DESC").all(name);
|
|
5108
|
+
for (const row of rows) {
|
|
5109
|
+
const agent = this.mapAgentRow(row);
|
|
5110
|
+
if (!isEphemeralAgent(agent)) {
|
|
5111
|
+
return agent;
|
|
5112
|
+
}
|
|
5113
|
+
}
|
|
5114
|
+
return null;
|
|
5115
|
+
}
|
|
5116
|
+
async hasNonEphemeralAgentWithName(name) {
|
|
5117
|
+
const normalizedName = name.trim();
|
|
5118
|
+
if (!normalizedName) {
|
|
5119
|
+
return false;
|
|
5120
|
+
}
|
|
5121
|
+
const existing = await this.findAgentByName(normalizedName);
|
|
5122
|
+
return existing !== null;
|
|
5123
|
+
}
|
|
5081
5124
|
/**
|
|
5082
5125
|
* Create a new agent with "idle" state.
|
|
5083
5126
|
*
|
|
@@ -5090,9 +5133,12 @@ var init_agent_store = __esm({
|
|
|
5090
5133
|
* same default (1h) to both at runtime. Writing the default explicitly
|
|
5091
5134
|
* removes that divergence and keeps the persisted config truthful.
|
|
5092
5135
|
*
|
|
5136
|
+
* Also enforces non-ephemeral name uniqueness: durable agents cannot share a
|
|
5137
|
+
* name, while ephemeral task-worker agents are allowed to duplicate names.
|
|
5138
|
+
*
|
|
5093
5139
|
* @param input - Creation parameters
|
|
5094
5140
|
* @returns The created agent
|
|
5095
|
-
* @throws Error if input is invalid
|
|
5141
|
+
* @throws Error if input is invalid or a duplicate non-ephemeral name exists
|
|
5096
5142
|
*/
|
|
5097
5143
|
async createAgent(input) {
|
|
5098
5144
|
if (!input.name?.trim()) {
|
|
@@ -5101,15 +5147,22 @@ var init_agent_store = __esm({
|
|
|
5101
5147
|
if (!input.role) {
|
|
5102
5148
|
throw new Error("Agent role is required");
|
|
5103
5149
|
}
|
|
5150
|
+
const normalizedName = input.name.trim();
|
|
5151
|
+
const metadata = input.metadata ?? {};
|
|
5152
|
+
const ephemeral = isEphemeralAgent({ metadata, name: input.name, role: input.role, reportsTo: input.reportsTo });
|
|
5153
|
+
if (!ephemeral) {
|
|
5154
|
+
const existing = await this.findAgentByName(normalizedName);
|
|
5155
|
+
if (existing) {
|
|
5156
|
+
throw new Error(`Agent with name "${normalizedName}" already exists (agentId: ${existing.id})`);
|
|
5157
|
+
}
|
|
5158
|
+
}
|
|
5104
5159
|
const now = (/* @__PURE__ */ new Date()).toISOString();
|
|
5105
5160
|
const agentId = `agent-${randomUUID().slice(0, 8)}`;
|
|
5106
|
-
const metadata = input.metadata ?? {};
|
|
5107
5161
|
const runtimeConfig = resolveCreationRuntimeConfig(input.runtimeConfig, metadata);
|
|
5108
|
-
const ephemeral = isEphemeralAgent({ metadata, name: input.name, role: input.role, reportsTo: input.reportsTo });
|
|
5109
5162
|
const resolvedHeartbeatProcedurePath = input.heartbeatProcedurePath ?? (ephemeral ? void 0 : getDefaultHeartbeatProcedurePath(agentId, input.name));
|
|
5110
5163
|
const agent = {
|
|
5111
5164
|
id: agentId,
|
|
5112
|
-
name:
|
|
5165
|
+
name: normalizedName,
|
|
5113
5166
|
role: input.role,
|
|
5114
5167
|
state: "idle",
|
|
5115
5168
|
createdAt: now,
|
|
@@ -19457,10 +19510,10 @@ var init_central_core = __esm({
|
|
|
19457
19510
|
*/
|
|
19458
19511
|
async generateProjectName(projectPath) {
|
|
19459
19512
|
try {
|
|
19460
|
-
const { execFile:
|
|
19513
|
+
const { execFile: execFile8 } = await import("node:child_process");
|
|
19461
19514
|
const { promisify: promisify14 } = await import("node:util");
|
|
19462
|
-
const
|
|
19463
|
-
const { stdout } = await
|
|
19515
|
+
const execFileAsync6 = promisify14(execFile8);
|
|
19516
|
+
const { stdout } = await execFileAsync6(
|
|
19464
19517
|
"git",
|
|
19465
19518
|
["remote", "get-url", "origin"],
|
|
19466
19519
|
{ cwd: projectPath, timeout: 5e3 }
|
|
@@ -20128,10 +20181,10 @@ var init_migration = __esm({
|
|
|
20128
20181
|
return basename3(projectPath);
|
|
20129
20182
|
}
|
|
20130
20183
|
try {
|
|
20131
|
-
const { execFile:
|
|
20184
|
+
const { execFile: execFile8 } = await import("node:child_process");
|
|
20132
20185
|
const { promisify: promisify14 } = await import("node:util");
|
|
20133
|
-
const
|
|
20134
|
-
const { stdout } = await
|
|
20186
|
+
const execFileAsync6 = promisify14(execFile8);
|
|
20187
|
+
const { stdout } = await execFileAsync6(
|
|
20135
20188
|
"git",
|
|
20136
20189
|
["remote", "get-url", "origin"],
|
|
20137
20190
|
{ cwd: projectPath, timeout: 1e3 }
|
|
@@ -30835,13 +30888,13 @@ async function searchWithQmd(rootDir, options) {
|
|
|
30835
30888
|
const command = "qmd";
|
|
30836
30889
|
const limit = Math.max(1, Math.min(options.limit ?? 5, 20));
|
|
30837
30890
|
try {
|
|
30838
|
-
const { execFile:
|
|
30891
|
+
const { execFile: execFile8 } = await import("node:child_process");
|
|
30839
30892
|
const { promisify: promisify14 } = await import("node:util");
|
|
30840
|
-
const
|
|
30841
|
-
await ensureQmdProjectMemoryCollection(rootDir,
|
|
30893
|
+
const execFileAsync6 = promisify14(execFile8);
|
|
30894
|
+
await ensureQmdProjectMemoryCollection(rootDir, execFileAsync6);
|
|
30842
30895
|
scheduleQmdProjectMemoryRefresh(rootDir);
|
|
30843
30896
|
const args = buildQmdSearchArgs(rootDir, options);
|
|
30844
|
-
const { stdout } = await
|
|
30897
|
+
const { stdout } = await execFileAsync6(command, args, {
|
|
30845
30898
|
cwd: rootDir,
|
|
30846
30899
|
timeout: 4e3,
|
|
30847
30900
|
maxBuffer: 1024 * 1024
|
|
@@ -30866,12 +30919,12 @@ async function searchWithQmd(rootDir, options) {
|
|
|
30866
30919
|
return [];
|
|
30867
30920
|
}
|
|
30868
30921
|
}
|
|
30869
|
-
async function ensureQmdProjectMemoryCollection(rootDir,
|
|
30922
|
+
async function ensureQmdProjectMemoryCollection(rootDir, execFileAsync6) {
|
|
30870
30923
|
const collectionName = qmdMemoryCollectionName(rootDir);
|
|
30871
30924
|
const memoryDir = memoryWorkspacePath(rootDir);
|
|
30872
30925
|
await mkdir6(memoryDir, { recursive: true });
|
|
30873
30926
|
try {
|
|
30874
|
-
await
|
|
30927
|
+
await execFileAsync6("qmd", buildQmdCollectionAddArgs(rootDir), {
|
|
30875
30928
|
cwd: rootDir,
|
|
30876
30929
|
timeout: 4e3,
|
|
30877
30930
|
maxBuffer: 512 * 1024
|
|
@@ -30887,9 +30940,9 @@ ${stderr}`)) {
|
|
|
30887
30940
|
return collectionName;
|
|
30888
30941
|
}
|
|
30889
30942
|
async function getDefaultExecFileAsync() {
|
|
30890
|
-
const { execFile:
|
|
30943
|
+
const { execFile: execFile8 } = await import("node:child_process");
|
|
30891
30944
|
const { promisify: promisify14 } = await import("node:util");
|
|
30892
|
-
return promisify14(
|
|
30945
|
+
return promisify14(execFile8);
|
|
30893
30946
|
}
|
|
30894
30947
|
async function refreshQmdProjectMemoryIndex(rootDir, options) {
|
|
30895
30948
|
const key = resolve6(rootDir);
|
|
@@ -30904,14 +30957,14 @@ async function refreshQmdProjectMemoryIndex(rootDir, options) {
|
|
|
30904
30957
|
}
|
|
30905
30958
|
}
|
|
30906
30959
|
const promise = (async () => {
|
|
30907
|
-
const
|
|
30908
|
-
await ensureQmdProjectMemoryCollection(rootDir,
|
|
30909
|
-
await
|
|
30960
|
+
const execFileAsync6 = options?.execFileAsync ?? await getDefaultExecFileAsync();
|
|
30961
|
+
await ensureQmdProjectMemoryCollection(rootDir, execFileAsync6);
|
|
30962
|
+
await execFileAsync6("qmd", ["update"], {
|
|
30910
30963
|
cwd: rootDir,
|
|
30911
30964
|
timeout: 3e4,
|
|
30912
30965
|
maxBuffer: 1024 * 1024
|
|
30913
30966
|
});
|
|
30914
|
-
await
|
|
30967
|
+
await execFileAsync6("qmd", ["embed"], {
|
|
30915
30968
|
cwd: rootDir,
|
|
30916
30969
|
timeout: 12e4,
|
|
30917
30970
|
maxBuffer: 1024 * 1024
|
|
@@ -30936,8 +30989,8 @@ function scheduleQmdProjectMemoryRefresh(rootDir) {
|
|
|
30936
30989
|
}
|
|
30937
30990
|
async function isQmdAvailable() {
|
|
30938
30991
|
try {
|
|
30939
|
-
const
|
|
30940
|
-
await
|
|
30992
|
+
const execFileAsync6 = await getDefaultExecFileAsync();
|
|
30993
|
+
await execFileAsync6("qmd", ["--help"], {
|
|
30941
30994
|
timeout: 3e3,
|
|
30942
30995
|
maxBuffer: 128 * 1024
|
|
30943
30996
|
});
|
|
@@ -30947,12 +31000,12 @@ async function isQmdAvailable() {
|
|
|
30947
31000
|
}
|
|
30948
31001
|
}
|
|
30949
31002
|
async function installQmd(options) {
|
|
30950
|
-
const
|
|
31003
|
+
const execFileAsync6 = options?.execFileAsync ?? await getDefaultExecFileAsync();
|
|
30951
31004
|
const [command, ...args] = QMD_INSTALL_COMMAND.split(" ");
|
|
30952
31005
|
if (!command || args.length === 0) {
|
|
30953
31006
|
throw new MemoryBackendError("BACKEND_UNAVAILABLE", "qmd install command is not configured", "qmd");
|
|
30954
31007
|
}
|
|
30955
|
-
await
|
|
31008
|
+
await execFileAsync6(command, args, {
|
|
30956
31009
|
timeout: 12e4,
|
|
30957
31010
|
maxBuffer: 1024 * 1024
|
|
30958
31011
|
});
|
|
@@ -37212,6 +37265,42 @@ ${notificationsSection}`;
|
|
|
37212
37265
|
}
|
|
37213
37266
|
return this.todoStore;
|
|
37214
37267
|
}
|
|
37268
|
+
// ── Verification Cache ────────────────────────────────────────────────────
|
|
37269
|
+
/**
|
|
37270
|
+
* Look up a previously recorded verification cache pass for a given tree sha
|
|
37271
|
+
* and command pair. Returns null when no cached pass exists.
|
|
37272
|
+
*
|
|
37273
|
+
* @param treeSha - The git tree SHA of the merged commit.
|
|
37274
|
+
* @param testCommand - The test command string (normalized to empty string when absent).
|
|
37275
|
+
* @param buildCommand - The build command string (normalized to empty string when absent).
|
|
37276
|
+
*/
|
|
37277
|
+
getVerificationCacheHit(treeSha, testCommand, buildCommand2) {
|
|
37278
|
+
const normalizedTest = testCommand ?? "";
|
|
37279
|
+
const normalizedBuild = buildCommand2 ?? "";
|
|
37280
|
+
const row = this.db.prepare(
|
|
37281
|
+
`SELECT recordedAt, taskId FROM verification_cache
|
|
37282
|
+
WHERE treeSha = ? AND testCommand = ? AND buildCommand = ?`
|
|
37283
|
+
).get(treeSha, normalizedTest, normalizedBuild);
|
|
37284
|
+
return row ?? null;
|
|
37285
|
+
}
|
|
37286
|
+
/**
|
|
37287
|
+
* Record a successful verification pass for the given tree sha and commands.
|
|
37288
|
+
* Uses INSERT OR REPLACE so a re-run of the same tree updates the timestamp.
|
|
37289
|
+
*
|
|
37290
|
+
* @param treeSha - The git tree SHA of the merged commit.
|
|
37291
|
+
* @param testCommand - The test command string (normalized to empty string when absent).
|
|
37292
|
+
* @param buildCommand - The build command string (normalized to empty string when absent).
|
|
37293
|
+
* @param taskId - The task ID that triggered the pass (for telemetry).
|
|
37294
|
+
*/
|
|
37295
|
+
recordVerificationCachePass(treeSha, testCommand, buildCommand2, taskId) {
|
|
37296
|
+
const normalizedTest = testCommand ?? "";
|
|
37297
|
+
const normalizedBuild = buildCommand2 ?? "";
|
|
37298
|
+
const recordedAt = (/* @__PURE__ */ new Date()).toISOString();
|
|
37299
|
+
this.db.prepare(
|
|
37300
|
+
`INSERT OR REPLACE INTO verification_cache (treeSha, testCommand, buildCommand, recordedAt, taskId)
|
|
37301
|
+
VALUES (?, ?, ?, ?, ?)`
|
|
37302
|
+
).run(treeSha, normalizedTest, normalizedBuild, recordedAt, taskId);
|
|
37303
|
+
}
|
|
37215
37304
|
// ── Backward Compatibility (Multi-Project Support) ────────────────────────
|
|
37216
37305
|
};
|
|
37217
37306
|
}
|
|
@@ -40579,7 +40668,7 @@ var init_docker_provisioning = __esm({
|
|
|
40579
40668
|
});
|
|
40580
40669
|
|
|
40581
40670
|
// ../core/src/memory-insights.ts
|
|
40582
|
-
import { readFile as readFile11, writeFile as writeFile8, mkdir as mkdir9 } from "node:fs/promises";
|
|
40671
|
+
import { readFile as readFile11, writeFile as writeFile8, mkdir as mkdir9, unlink as unlink5 } from "node:fs/promises";
|
|
40583
40672
|
import { existsSync as existsSync17 } from "node:fs";
|
|
40584
40673
|
import { dirname as dirname7, join as join19 } from "node:path";
|
|
40585
40674
|
async function readWorkingMemory(rootDir) {
|
|
@@ -40589,7 +40678,35 @@ async function readWorkingMemory(rootDir) {
|
|
|
40589
40678
|
}
|
|
40590
40679
|
return readFile11(filePath, "utf-8");
|
|
40591
40680
|
}
|
|
40681
|
+
async function migrateLegacyArtifactIfNeeded(rootDir, canonicalPath, legacyPath) {
|
|
40682
|
+
const canonicalFilePath = join19(rootDir, canonicalPath);
|
|
40683
|
+
const legacyFilePath = join19(rootDir, legacyPath);
|
|
40684
|
+
if (existsSync17(canonicalFilePath) || !existsSync17(legacyFilePath)) {
|
|
40685
|
+
return;
|
|
40686
|
+
}
|
|
40687
|
+
const content = await readFile11(legacyFilePath, "utf-8");
|
|
40688
|
+
const canonicalDir = dirname7(canonicalFilePath);
|
|
40689
|
+
if (!existsSync17(canonicalDir)) {
|
|
40690
|
+
await mkdir9(canonicalDir, { recursive: true });
|
|
40691
|
+
}
|
|
40692
|
+
await writeFile8(canonicalFilePath, content, "utf-8");
|
|
40693
|
+
try {
|
|
40694
|
+
await unlink5(legacyFilePath);
|
|
40695
|
+
} catch {
|
|
40696
|
+
}
|
|
40697
|
+
}
|
|
40698
|
+
async function removeLegacyArtifactIfPresent(rootDir, legacyPath) {
|
|
40699
|
+
const legacyFilePath = join19(rootDir, legacyPath);
|
|
40700
|
+
if (!existsSync17(legacyFilePath)) {
|
|
40701
|
+
return;
|
|
40702
|
+
}
|
|
40703
|
+
try {
|
|
40704
|
+
await unlink5(legacyFilePath);
|
|
40705
|
+
} catch {
|
|
40706
|
+
}
|
|
40707
|
+
}
|
|
40592
40708
|
async function readInsightsMemory(rootDir) {
|
|
40709
|
+
await migrateLegacyArtifactIfNeeded(rootDir, MEMORY_INSIGHTS_PATH, LEGACY_MEMORY_INSIGHTS_PATH);
|
|
40593
40710
|
const filePath = join19(rootDir, MEMORY_INSIGHTS_PATH);
|
|
40594
40711
|
if (!existsSync17(filePath)) {
|
|
40595
40712
|
return null;
|
|
@@ -40598,11 +40715,12 @@ async function readInsightsMemory(rootDir) {
|
|
|
40598
40715
|
}
|
|
40599
40716
|
async function writeInsightsMemory(rootDir, content) {
|
|
40600
40717
|
const filePath = join19(rootDir, MEMORY_INSIGHTS_PATH);
|
|
40601
|
-
const dir =
|
|
40718
|
+
const dir = dirname7(filePath);
|
|
40602
40719
|
if (!existsSync17(dir)) {
|
|
40603
40720
|
await mkdir9(dir, { recursive: true });
|
|
40604
40721
|
}
|
|
40605
40722
|
await writeFile8(filePath, content, "utf-8");
|
|
40723
|
+
await removeLegacyArtifactIfPresent(rootDir, LEGACY_MEMORY_INSIGHTS_PATH);
|
|
40606
40724
|
}
|
|
40607
40725
|
async function writeWorkingMemory(rootDir, content) {
|
|
40608
40726
|
const filePath = join19(rootDir, MEMORY_WORKING_PATH);
|
|
@@ -40613,6 +40731,7 @@ async function writeWorkingMemory(rootDir, content) {
|
|
|
40613
40731
|
await writeFile8(filePath, content, "utf-8");
|
|
40614
40732
|
}
|
|
40615
40733
|
async function readMemoryAudit(rootDir) {
|
|
40734
|
+
await migrateLegacyArtifactIfNeeded(rootDir, MEMORY_AUDIT_PATH, LEGACY_MEMORY_AUDIT_PATH);
|
|
40616
40735
|
const filePath = join19(rootDir, MEMORY_AUDIT_PATH);
|
|
40617
40736
|
if (!existsSync17(filePath)) {
|
|
40618
40737
|
return null;
|
|
@@ -40621,13 +40740,15 @@ async function readMemoryAudit(rootDir) {
|
|
|
40621
40740
|
}
|
|
40622
40741
|
async function writeMemoryAudit(rootDir, content) {
|
|
40623
40742
|
const filePath = join19(rootDir, MEMORY_AUDIT_PATH);
|
|
40624
|
-
const dir =
|
|
40743
|
+
const dir = dirname7(filePath);
|
|
40625
40744
|
if (!existsSync17(dir)) {
|
|
40626
40745
|
await mkdir9(dir, { recursive: true });
|
|
40627
40746
|
}
|
|
40628
40747
|
await writeFile8(filePath, content, "utf-8");
|
|
40748
|
+
await removeLegacyArtifactIfPresent(rootDir, LEGACY_MEMORY_AUDIT_PATH);
|
|
40629
40749
|
}
|
|
40630
40750
|
async function readMemoryAuditState(rootDir) {
|
|
40751
|
+
await migrateLegacyArtifactIfNeeded(rootDir, MEMORY_AUDIT_STATE_PATH, LEGACY_MEMORY_AUDIT_STATE_PATH);
|
|
40631
40752
|
const filePath = join19(rootDir, MEMORY_AUDIT_STATE_PATH);
|
|
40632
40753
|
if (!existsSync17(filePath)) {
|
|
40633
40754
|
return null;
|
|
@@ -40648,11 +40769,12 @@ async function readMemoryAuditState(rootDir) {
|
|
|
40648
40769
|
}
|
|
40649
40770
|
async function writeMemoryAuditState(rootDir, state) {
|
|
40650
40771
|
const filePath = join19(rootDir, MEMORY_AUDIT_STATE_PATH);
|
|
40651
|
-
const dir =
|
|
40772
|
+
const dir = dirname7(filePath);
|
|
40652
40773
|
if (!existsSync17(dir)) {
|
|
40653
40774
|
await mkdir9(dir, { recursive: true });
|
|
40654
40775
|
}
|
|
40655
40776
|
await writeFile8(filePath, JSON.stringify(state, null, 2), "utf-8");
|
|
40777
|
+
await removeLegacyArtifactIfPresent(rootDir, LEGACY_MEMORY_AUDIT_STATE_PATH);
|
|
40656
40778
|
}
|
|
40657
40779
|
function isValidExtractionMetadata(value) {
|
|
40658
40780
|
if (!value || typeof value !== "object") {
|
|
@@ -40927,7 +41049,7 @@ function createInsightExtractionAutomation(settings, modelProvider, modelId) {
|
|
|
40927
41049
|
## Instructions
|
|
40928
41050
|
|
|
40929
41051
|
1. Read the working memory file at \`.fusion/memory/MEMORY.md\` using your file reading tools
|
|
40930
|
-
2. Read the existing insights file at \`.fusion/memory-insights.md\` (it may not exist yet)
|
|
41052
|
+
2. Read the existing insights file at \`.fusion/memory/memory-insights.md\` (it may not exist yet)
|
|
40931
41053
|
3. Analyze the working memory content and identify:
|
|
40932
41054
|
a) **New insights** that should be preserved in long-term memory
|
|
40933
41055
|
b) **Durable content** that should remain in working memory
|
|
@@ -41231,7 +41353,7 @@ async function generateMemoryAudit(rootDir, lastExtraction, pruningOutcome) {
|
|
|
41231
41353
|
id: "insights-memory-exists",
|
|
41232
41354
|
name: "Insights memory file exists",
|
|
41233
41355
|
passed: false,
|
|
41234
|
-
details: "File .fusion/memory-insights.md does not exist yet"
|
|
41356
|
+
details: "File .fusion/memory/memory-insights.md does not exist yet"
|
|
41235
41357
|
});
|
|
41236
41358
|
}
|
|
41237
41359
|
if (insightsMemoryExists) {
|
|
@@ -41477,14 +41599,17 @@ async function processAndAuditInsightExtraction(rootDir, input) {
|
|
|
41477
41599
|
}
|
|
41478
41600
|
return auditReport;
|
|
41479
41601
|
}
|
|
41480
|
-
var MEMORY_WORKING_PATH, MEMORY_INSIGHTS_PATH, MEMORY_AUDIT_PATH, MEMORY_AUDIT_STATE_PATH, DEFAULT_INSIGHT_SCHEDULE, DEFAULT_MIN_INTERVAL_MS, MIN_INSIGHT_GROWTH_CHARS, INSIGHT_EXTRACTION_SCHEDULE_NAME, REQUIRED_MEMORY_SECTIONS;
|
|
41602
|
+
var MEMORY_WORKING_PATH, MEMORY_INSIGHTS_PATH, MEMORY_AUDIT_PATH, MEMORY_AUDIT_STATE_PATH, LEGACY_MEMORY_INSIGHTS_PATH, LEGACY_MEMORY_AUDIT_PATH, LEGACY_MEMORY_AUDIT_STATE_PATH, DEFAULT_INSIGHT_SCHEDULE, DEFAULT_MIN_INTERVAL_MS, MIN_INSIGHT_GROWTH_CHARS, INSIGHT_EXTRACTION_SCHEDULE_NAME, REQUIRED_MEMORY_SECTIONS;
|
|
41481
41603
|
var init_memory_insights = __esm({
|
|
41482
41604
|
"../core/src/memory-insights.ts"() {
|
|
41483
41605
|
"use strict";
|
|
41484
41606
|
MEMORY_WORKING_PATH = ".fusion/memory/MEMORY.md";
|
|
41485
|
-
MEMORY_INSIGHTS_PATH = ".fusion/memory-insights.md";
|
|
41486
|
-
MEMORY_AUDIT_PATH = ".fusion/memory-audit.md";
|
|
41487
|
-
MEMORY_AUDIT_STATE_PATH = ".fusion/memory-audit-state.json";
|
|
41607
|
+
MEMORY_INSIGHTS_PATH = ".fusion/memory/memory-insights.md";
|
|
41608
|
+
MEMORY_AUDIT_PATH = ".fusion/memory/memory-audit.md";
|
|
41609
|
+
MEMORY_AUDIT_STATE_PATH = ".fusion/memory/memory-audit-state.json";
|
|
41610
|
+
LEGACY_MEMORY_INSIGHTS_PATH = ".fusion/memory-insights.md";
|
|
41611
|
+
LEGACY_MEMORY_AUDIT_PATH = ".fusion/memory-audit.md";
|
|
41612
|
+
LEGACY_MEMORY_AUDIT_STATE_PATH = ".fusion/memory-audit-state.json";
|
|
41488
41613
|
DEFAULT_INSIGHT_SCHEDULE = "0 2 * * *";
|
|
41489
41614
|
DEFAULT_MIN_INTERVAL_MS = 24 * 60 * 60 * 1e3;
|
|
41490
41615
|
MIN_INSIGHT_GROWTH_CHARS = 1e3;
|
|
@@ -51895,12 +52020,12 @@ function resolveExtractionRoot(tempDir) {
|
|
|
51895
52020
|
return tempDir;
|
|
51896
52021
|
}
|
|
51897
52022
|
async function extractTarArchive(archivePath, outputDir) {
|
|
51898
|
-
const [{ execFile:
|
|
52023
|
+
const [{ execFile: execFile8 }, { promisify: promisify14 }] = await Promise.all([
|
|
51899
52024
|
import("node:child_process"),
|
|
51900
52025
|
import("node:util")
|
|
51901
52026
|
]);
|
|
51902
|
-
const
|
|
51903
|
-
await
|
|
52027
|
+
const execFileAsync6 = promisify14(execFile8);
|
|
52028
|
+
await execFileAsync6("tar", ["xzf", archivePath, "-C", outputDir]);
|
|
51904
52029
|
}
|
|
51905
52030
|
async function parseCompanyArchive(archivePath) {
|
|
51906
52031
|
const resolvedArchivePath = resolve9(archivePath);
|
|
@@ -54016,7 +54141,13 @@ var init_research_orchestrator = __esm({
|
|
|
54016
54141
|
continue;
|
|
54017
54142
|
}
|
|
54018
54143
|
for (const source of result.data.slice(0, Math.max(0, config.maxSources - allSources.length))) {
|
|
54019
|
-
const saved = this.store.addSource(runId,
|
|
54144
|
+
const saved = this.store.addSource(runId, {
|
|
54145
|
+
...source,
|
|
54146
|
+
metadata: {
|
|
54147
|
+
...source.metadata ?? {},
|
|
54148
|
+
providerType: provider.type
|
|
54149
|
+
}
|
|
54150
|
+
});
|
|
54020
54151
|
allSources.push(saved);
|
|
54021
54152
|
this.store.addEvent(runId, {
|
|
54022
54153
|
type: "source_added",
|
|
@@ -54043,7 +54174,9 @@ var init_research_orchestrator = __esm({
|
|
|
54043
54174
|
sourceId: source.id
|
|
54044
54175
|
});
|
|
54045
54176
|
this.stepStarted(runId, step);
|
|
54046
|
-
const
|
|
54177
|
+
const sourceProvider = this.getSourceProviderType(source);
|
|
54178
|
+
const providerConfig = sourceProvider ? config.providers.find((p) => p.type === sourceProvider)?.config : provider?.config;
|
|
54179
|
+
const result = await this.stepRunner.runContentFetch(source.reference, sourceProvider, providerConfig, signal);
|
|
54047
54180
|
if (!result.ok || !result.data) {
|
|
54048
54181
|
this.stepFailed(runId, step.id, result.error?.message ?? "Failed to fetch source content", result.error);
|
|
54049
54182
|
continue;
|
|
@@ -54242,6 +54375,10 @@ var init_research_orchestrator = __esm({
|
|
|
54242
54375
|
throw signal.reason ?? new Error("Research run aborted");
|
|
54243
54376
|
}
|
|
54244
54377
|
}
|
|
54378
|
+
getSourceProviderType(source) {
|
|
54379
|
+
const providerType = source.metadata?.providerType;
|
|
54380
|
+
return typeof providerType === "string" && providerType.length > 0 ? providerType : void 0;
|
|
54381
|
+
}
|
|
54245
54382
|
canWriteRunData(runId) {
|
|
54246
54383
|
const run = this.store.getRun(runId);
|
|
54247
54384
|
if (!run) return false;
|
|
@@ -56805,8 +56942,8 @@ var init_research_step_runner = __esm({
|
|
|
56805
56942
|
return this.classifyError("source-query", error);
|
|
56806
56943
|
}
|
|
56807
56944
|
}
|
|
56808
|
-
async runContentFetch(url, config = {}, signal) {
|
|
56809
|
-
const provider = this.
|
|
56945
|
+
async runContentFetch(url, providerType, config = {}, signal) {
|
|
56946
|
+
const provider = this.resolveContentProvider(providerType);
|
|
56810
56947
|
if (!provider) {
|
|
56811
56948
|
return this.unconfigured("no configured provider available for content fetch");
|
|
56812
56949
|
}
|
|
@@ -56845,6 +56982,13 @@ var init_research_step_runner = __esm({
|
|
|
56845
56982
|
}
|
|
56846
56983
|
return void 0;
|
|
56847
56984
|
}
|
|
56985
|
+
resolveContentProvider(providerType) {
|
|
56986
|
+
if (providerType) {
|
|
56987
|
+
const selected = this.providers.get(providerType);
|
|
56988
|
+
if (selected?.isConfigured()) return selected;
|
|
56989
|
+
}
|
|
56990
|
+
return this.findFirstConfiguredProvider();
|
|
56991
|
+
}
|
|
56848
56992
|
classifyError(step, error) {
|
|
56849
56993
|
if (error instanceof ResearchStepTimeoutError) {
|
|
56850
56994
|
return { ok: false, error: { code: "timeout", message: error.message, retryable: true } };
|
|
@@ -57057,11 +57201,11 @@ async function refreshAgentMemoryQmdIndex(rootDir, agentMemory) {
|
|
|
57057
57201
|
return;
|
|
57058
57202
|
}
|
|
57059
57203
|
const promise = (async () => {
|
|
57060
|
-
const { execFile:
|
|
57204
|
+
const { execFile: execFile8 } = await import("node:child_process");
|
|
57061
57205
|
const { promisify: promisify14 } = await import("node:util");
|
|
57062
|
-
const
|
|
57206
|
+
const execFileAsync6 = promisify14(execFile8);
|
|
57063
57207
|
try {
|
|
57064
|
-
await
|
|
57208
|
+
await execFileAsync6("qmd", buildQmdAgentMemoryCollectionAddArgs(rootDir, agentMemory.agentId), {
|
|
57065
57209
|
cwd: rootDir,
|
|
57066
57210
|
timeout: 4e3,
|
|
57067
57211
|
maxBuffer: 512 * 1024
|
|
@@ -57074,8 +57218,8 @@ ${stderr}`)) {
|
|
|
57074
57218
|
throw error;
|
|
57075
57219
|
}
|
|
57076
57220
|
}
|
|
57077
|
-
await
|
|
57078
|
-
await
|
|
57221
|
+
await execFileAsync6("qmd", ["update"], { cwd: rootDir, timeout: 3e4, maxBuffer: 1024 * 1024 });
|
|
57222
|
+
await execFileAsync6("qmd", ["embed"], { cwd: rootDir, timeout: 12e4, maxBuffer: 1024 * 1024 });
|
|
57079
57223
|
})();
|
|
57080
57224
|
agentQmdRefreshState.set(key, { lastStartedAt: now, inFlight: promise });
|
|
57081
57225
|
try {
|
|
@@ -57123,10 +57267,10 @@ async function searchAgentMemoryWithQmd(rootDir, agentMemory, query, limit) {
|
|
|
57123
57267
|
}
|
|
57124
57268
|
try {
|
|
57125
57269
|
await refreshAgentMemoryQmdIndex(rootDir, agentMemory);
|
|
57126
|
-
const { execFile:
|
|
57270
|
+
const { execFile: execFile8 } = await import("node:child_process");
|
|
57127
57271
|
const { promisify: promisify14 } = await import("node:util");
|
|
57128
|
-
const
|
|
57129
|
-
const { stdout } = await
|
|
57272
|
+
const execFileAsync6 = promisify14(execFile8);
|
|
57273
|
+
const { stdout } = await execFileAsync6("qmd", buildQmdAgentMemorySearchArgs(rootDir, agentMemory.agentId, query, limit), {
|
|
57130
57274
|
cwd: rootDir,
|
|
57131
57275
|
timeout: 4e3,
|
|
57132
57276
|
maxBuffer: 1024 * 1024
|
|
@@ -57567,6 +57711,151 @@ ${lines.join("\n\n")}` }],
|
|
|
57567
57711
|
}
|
|
57568
57712
|
};
|
|
57569
57713
|
}
|
|
57714
|
+
// Builds the fn_get_agent_config tool: lets a supervising agent read the full
// configuration (identity, instructions, runtime config, memory) of one of its
// direct reports. Access is denied unless the target reports to the caller.
function createGetAgentConfigTool(agentStore, callingAgentId) {
  // Uniform error-shaped tool result.
  const deny = (text) => ({ content: [{ type: "text", text }], details: {} });
  return {
    name: "fn_get_agent_config",
    label: "Get Agent Config",
    description: "Read full configuration for one of your direct-report agents.",
    parameters: getAgentConfigParams,
    execute: async (_id, params) => {
      const agent = await agentStore.getAgent(params.agent_id);
      if (!agent) {
        return deny(`ERROR: Agent ${params.agent_id} not found`);
      }
      // Only direct reports of the calling agent are readable.
      if (agent.reportsTo !== callingAgentId) {
        return deny("ERROR: You can only read configuration of agents that report to you");
      }
      const rc = agent.runtimeConfig ?? {};
      const report = [];
      report.push(
        `Agent Config: ${agent.name} (${agent.id})`,
        `Role: ${agent.role}`,
        `State: ${agent.state}`,
        `Title: ${agent.title ?? "(none)"}`,
        `Icon: ${agent.icon ?? "(none)"}`
      );
      report.push("", "Soul:", agent.soul ?? "(none)");
      report.push("", "Instructions Text:", agent.instructionsText ?? "(none)");
      report.push(
        `Instructions Path: ${agent.instructionsPath ?? "(none)"}`,
        `Heartbeat Procedure Path: ${agent.heartbeatProcedurePath ?? "(none)"}`
      );
      report.push(
        "",
        "Runtime Config:",
        `heartbeatIntervalMs: ${String(rc.heartbeatIntervalMs ?? "(default)")}`,
        `heartbeatTimeoutMs: ${String(rc.heartbeatTimeoutMs ?? "(default)")}`,
        `maxConcurrentRuns: ${String(rc.maxConcurrentRuns ?? "(default)")}`,
        `messageResponseMode: ${String(rc.messageResponseMode ?? "(default)")}`,
        `budget: ${JSON.stringify(rc.budget ?? null)}`
      );
      report.push("", "Memory:", agent.memory ?? "(none)");
      return {
        content: [{ type: "text", text: report.join("\n") }],
        details: { agent }
      };
    }
  };
}
|
|
57767
|
+
// Builds the fn_update_agent_config tool: lets a supervising agent update the
// configuration of one of its direct reports. Validates ownership, rejects
// ephemeral agents, enforces per-field length limits, and merges any runtime
// config changes into the existing runtimeConfig rather than replacing it.
function createUpdateAgentConfigTool(agentStore, callingAgentId) {
  // Uniform error-shaped tool result.
  const fail = (text) => ({ content: [{ type: "text", text }], details: {} });
  // [param field, max length] pairs for simple string-length validation,
  // checked in declaration order to preserve which error fires first.
  const lengthLimits = [
    ["soul", 1e4],
    ["instructions_text", 5e4],
    ["instructions_path", 500],
    ["heartbeat_procedure_path", 500]
  ];
  return {
    name: "fn_update_agent_config",
    label: "Update Agent Config",
    description: "Update configuration for one of your direct-report agents.",
    parameters: updateAgentConfigParams,
    execute: async (_id, params) => {
      const agent = await agentStore.getAgent(params.agent_id);
      if (!agent) {
        return fail(`ERROR: Agent ${params.agent_id} not found`);
      }
      if (agent.reportsTo !== callingAgentId) {
        return fail("ERROR: You can only update configuration of agents that report to you");
      }
      if (isEphemeralAgent(agent)) {
        return fail(`ERROR: Cannot update ephemeral/runtime agent ${params.agent_id}`);
      }
      for (const [field, limit] of lengthLimits) {
        const value = params[field];
        if (value && value.length > limit) {
          return fail(`ERROR: ${field} exceeds ${limit} character limit`);
        }
      }
      // Collect runtime-config overrides that were explicitly supplied.
      const runtimePatch = {};
      if (params.heartbeat_interval_ms !== void 0) runtimePatch.heartbeatIntervalMs = params.heartbeat_interval_ms;
      if (params.heartbeat_timeout_ms !== void 0) runtimePatch.heartbeatTimeoutMs = params.heartbeat_timeout_ms;
      if (params.max_concurrent_runs !== void 0) runtimePatch.maxConcurrentRuns = params.max_concurrent_runs;
      if (params.message_response_mode !== void 0) runtimePatch.messageResponseMode = params.message_response_mode;
      const updateInput = {};
      if (params.soul !== void 0) updateInput.soul = params.soul;
      if (params.instructions_text !== void 0) updateInput.instructionsText = params.instructions_text;
      if (params.instructions_path !== void 0) updateInput.instructionsPath = params.instructions_path;
      if (params.heartbeat_procedure_path !== void 0) updateInput.heartbeatProcedurePath = params.heartbeat_procedure_path;
      if (Object.keys(runtimePatch).length > 0) {
        // Merge over the existing runtimeConfig so untouched keys survive.
        updateInput.runtimeConfig = { ...agent.runtimeConfig ?? {}, ...runtimePatch };
      }
      if (Object.keys(updateInput).length === 0) {
        return fail("ERROR: Provide at least one field to update");
      }
      const updated = await agentStore.updateAgent(params.agent_id, updateInput);
      const rc = updated.runtimeConfig ?? {};
      return {
        content: [{
          type: "text",
          text: `Updated ${updated.name} (${updated.id})
heartbeatIntervalMs: ${String(rc.heartbeatIntervalMs ?? "(default)")}
heartbeatTimeoutMs: ${String(rc.heartbeatTimeoutMs ?? "(default)")}
maxConcurrentRuns: ${String(rc.maxConcurrentRuns ?? "(default)")}
messageResponseMode: ${String(rc.messageResponseMode ?? "(default)")}`
        }],
        details: { agent: updated }
      };
    }
  };
}
|
|
57570
57859
|
function createDelegateTaskTool(agentStore, taskStore, options) {
|
|
57571
57860
|
return {
|
|
57572
57861
|
name: "fn_delegate_task",
|
|
@@ -57896,7 +58185,7 @@ ${lines.join("\n")}`
|
|
|
57896
58185
|
}
|
|
57897
58186
|
};
|
|
57898
58187
|
}
|
|
57899
|
-
var taskCreateParams, taskLogParams, taskDocumentWriteParams, taskDocumentReadParams, reflectOnPerformanceParams, listAgentsParams, delegateTaskParams, sendMessageParams, readMessagesParams, memorySearchParams, memoryGetParams, researchRunParams, researchListParams, researchGetParams, researchCancelParams, memoryAppendParams, log11, AGENT_MEMORY_ROOT2, AGENT_MEMORY_FILENAME2, AGENT_DREAMS_FILENAME2, agentQmdRefreshState, AGENT_QMD_REFRESH_INTERVAL_MS, DAILY_AGENT_MEMORY_RE2;
|
|
58188
|
+
var taskCreateParams, taskLogParams, taskDocumentWriteParams, taskDocumentReadParams, reflectOnPerformanceParams, listAgentsParams, delegateTaskParams, getAgentConfigParams, updateAgentConfigParams, sendMessageParams, readMessagesParams, memorySearchParams, memoryGetParams, researchRunParams, researchListParams, researchGetParams, researchCancelParams, memoryAppendParams, log11, AGENT_MEMORY_ROOT2, AGENT_MEMORY_FILENAME2, AGENT_DREAMS_FILENAME2, agentQmdRefreshState, AGENT_QMD_REFRESH_INTERVAL_MS, DAILY_AGENT_MEMORY_RE2;
|
|
57900
58189
|
var init_agent_tools = __esm({
|
|
57901
58190
|
"../engine/src/agent-tools.ts"() {
|
|
57902
58191
|
"use strict";
|
|
@@ -57950,6 +58239,23 @@ var init_agent_tools = __esm({
|
|
|
57950
58239
|
Type.Array(Type.String(), { description: 'Task IDs this new task depends on (e.g. ["KB-001"])' })
|
|
57951
58240
|
)
|
|
57952
58241
|
});
|
|
58242
|
+
getAgentConfigParams = Type.Object({
|
|
58243
|
+
agent_id: Type.String({ description: "The agent ID to read configuration for" })
|
|
58244
|
+
});
|
|
58245
|
+
updateAgentConfigParams = Type.Object({
|
|
58246
|
+
agent_id: Type.String({ description: "The agent ID to update" }),
|
|
58247
|
+
soul: Type.Optional(Type.String({ description: "Agent personality/identity text", maxLength: 1e4 })),
|
|
58248
|
+
instructions_text: Type.Optional(Type.String({ description: "Inline custom instructions", maxLength: 5e4 })),
|
|
58249
|
+
instructions_path: Type.Optional(Type.String({ description: "Path to instructions markdown file", maxLength: 500 })),
|
|
58250
|
+
heartbeat_procedure_path: Type.Optional(Type.String({ description: "Path to heartbeat procedure markdown file", maxLength: 500 })),
|
|
58251
|
+
heartbeat_interval_ms: Type.Optional(Type.Number({ description: "Heartbeat polling interval in ms", minimum: 1e3 })),
|
|
58252
|
+
heartbeat_timeout_ms: Type.Optional(Type.Number({ description: "Heartbeat timeout in ms", minimum: 5e3 })),
|
|
58253
|
+
max_concurrent_runs: Type.Optional(Type.Number({ description: "Max concurrent heartbeat runs", minimum: 1 })),
|
|
58254
|
+
message_response_mode: Type.Optional(Type.Union([
|
|
58255
|
+
Type.Literal("immediate"),
|
|
58256
|
+
Type.Literal("on-heartbeat")
|
|
58257
|
+
], { description: "How agent responds to messages" }))
|
|
58258
|
+
});
|
|
57953
58259
|
sendMessageParams = Type.Object({
|
|
57954
58260
|
to_id: Type.String({ description: "Recipient ID (agent ID or user ID, depending on message type)" }),
|
|
57955
58261
|
content: Type.String({ description: "Message body (1-2000 characters)" }),
|
|
@@ -62085,7 +62391,8 @@ async function execWithProcessGroup(command, options) {
|
|
|
62085
62391
|
cwd: options.cwd,
|
|
62086
62392
|
shell: true,
|
|
62087
62393
|
detached: useProcessGroup,
|
|
62088
|
-
stdio: ["ignore", "pipe", "pipe"]
|
|
62394
|
+
stdio: ["ignore", "pipe", "pipe"],
|
|
62395
|
+
...options.env !== void 0 && { env: { ...process.env, ...options.env } }
|
|
62089
62396
|
});
|
|
62090
62397
|
let stdout = "";
|
|
62091
62398
|
let stderr = "";
|
|
@@ -62287,7 +62594,7 @@ ${footer}`;
|
|
|
62287
62594
|
return parts.join("\n") + `
|
|
62288
62595
|
${footer}`;
|
|
62289
62596
|
}
|
|
62290
|
-
async function runVerificationCommand(store, rootDir, taskId, command, type, signal, log18, agentLabel) {
|
|
62597
|
+
async function runVerificationCommand(store, rootDir, taskId, command, type, signal, log18, agentLabel, extraEnv) {
|
|
62291
62598
|
const logger2 = log18 ?? { log: console.log, error: console.error, warn: console.warn };
|
|
62292
62599
|
const label = agentLabel ?? "merger";
|
|
62293
62600
|
if (signal?.aborted) {
|
|
@@ -62312,7 +62619,8 @@ async function runVerificationCommand(store, rootDir, taskId, command, type, sig
|
|
|
62312
62619
|
cwd: rootDir,
|
|
62313
62620
|
timeout: VERIFICATION_COMMAND_TIMEOUT_MS,
|
|
62314
62621
|
maxBuffer: VERIFICATION_COMMAND_MAX_BUFFER,
|
|
62315
|
-
signal
|
|
62622
|
+
signal,
|
|
62623
|
+
...extraEnv !== void 0 && { env: extraEnv }
|
|
62316
62624
|
});
|
|
62317
62625
|
if (signal?.aborted) {
|
|
62318
62626
|
throw Object.assign(
|
|
@@ -62557,7 +62865,7 @@ var init_run_audit = __esm({
|
|
|
62557
62865
|
});
|
|
62558
62866
|
|
|
62559
62867
|
// ../engine/src/merger.ts
|
|
62560
|
-
import { execSync, exec as exec3 } from "node:child_process";
|
|
62868
|
+
import { execSync, exec as exec3, execFile as execFile3 } from "node:child_process";
|
|
62561
62869
|
import { promisify as promisify4 } from "node:util";
|
|
62562
62870
|
import { existsSync as existsSync24 } from "node:fs";
|
|
62563
62871
|
import { join as join30 } from "node:path";
|
|
@@ -62689,11 +62997,51 @@ function throwIfAborted(signal, taskId) {
|
|
|
62689
62997
|
if (!signal?.aborted) return;
|
|
62690
62998
|
throw new MergeAbortedError(`Merge aborted for ${taskId}: engine shutdown requested`);
|
|
62691
62999
|
}
|
|
63000
|
+
// Captures the set of paths currently dirty in the git working tree at rootDir:
// unstaged modifications, staged modifications, and untracked files.
// Best-effort: any git failure (missing binary, not a repo) degrades to an
// empty or partial set rather than throwing.
async function snapshotDirtyFiles(rootDir) {
  const dirty = /* @__PURE__ */ new Set();
  // Runs git and resolves to stdout, or "" on any failure.
  const gitOut = (args) => execFileAsync("git", args, { cwd: rootDir, encoding: "utf-8" }).then(
    (result) => result.stdout,
    () => ""
  );
  try {
    const [unstagedOut, stagedOut, porcelainOut] = await Promise.all([
      gitOut(["diff", "-z", "--name-only"]),
      gitOut(["diff", "-z", "--cached", "--name-only"]),
      gitOut(["status", "-z", "--porcelain"])
    ]);
    // NUL-separated path lists from the two diff invocations.
    for (const raw of [...unstagedOut.split("\0"), ...stagedOut.split("\0")]) {
      const path = raw.trim();
      if (path) dirty.add(path);
    }
    // Porcelain entries: only "?? " (untracked) records are of interest here.
    for (const raw of porcelainOut.split("\0")) {
      if (!raw.startsWith("?? ")) continue;
      const path = raw.slice(3);
      if (path) dirty.add(path);
    }
  } catch {
  }
  return dirty;
}
|
|
62692
63034
|
function rethrowIfMergeAborted(error) {
|
|
62693
63035
|
if (error instanceof Error && error.name === "MergeAbortedError") {
|
|
62694
63036
|
throw error;
|
|
62695
63037
|
}
|
|
62696
63038
|
}
|
|
63039
|
+
// Runs execSync and normalizes its result to a trimmed string.
// execSync can yield a Buffer, a string (with encoding set), or null/undefined
// depending on stdio options; all cases collapse to a plain trimmed string.
function execSyncText(command, options) {
  const raw = execSync(command, options);
  if (raw == null) return "";
  const text = typeof raw === "string" ? raw : raw.toString("utf-8");
  return text.trim();
}
|
|
62697
63045
|
async function runDeterministicVerification(store, rootDir, taskId, testCommand, buildCommand2, testSource, buildSource, signal) {
|
|
62698
63046
|
const result = { allPassed: true };
|
|
62699
63047
|
if (!testCommand && !buildCommand2) {
|
|
@@ -62704,6 +63052,35 @@ async function runDeterministicVerification(store, rootDir, taskId, testCommand,
|
|
|
62704
63052
|
const normalizedBuildCommand = buildCommand2?.trim();
|
|
62705
63053
|
const hasTestCommand = !!normalizedTestCommand;
|
|
62706
63054
|
const hasBuildCommand = !!normalizedBuildCommand;
|
|
63055
|
+
const effectiveTestCommand = normalizedTestCommand ?? "";
|
|
63056
|
+
const effectiveBuildCommand = normalizedBuildCommand ?? "";
|
|
63057
|
+
let treeSha = null;
|
|
63058
|
+
try {
|
|
63059
|
+
treeSha = execSync("git rev-parse HEAD^{tree}", { cwd: rootDir, stdio: "pipe" }).toString().trim();
|
|
63060
|
+
} catch (err) {
|
|
63061
|
+
mergerLog.warn(`${taskId}: could not resolve tree sha \u2014 skipping verification cache: ${String(err)}`);
|
|
63062
|
+
}
|
|
63063
|
+
if (treeSha) {
|
|
63064
|
+
const cacheHit = store.getVerificationCacheHit(treeSha, effectiveTestCommand, effectiveBuildCommand);
|
|
63065
|
+
if (cacheHit) {
|
|
63066
|
+
const sha7 = treeSha.slice(0, 7);
|
|
63067
|
+
const msg = `Skipping deterministic verification \u2014 cached pass for tree ${sha7} (recorded at ${cacheHit.recordedAt}, by ${cacheHit.taskId ?? "unknown"})`;
|
|
63068
|
+
mergerLog.log(`${taskId}: ${msg}`);
|
|
63069
|
+
await store.logEntry(taskId, msg);
|
|
63070
|
+
await store.appendAgentLog(taskId, msg, "text", void 0, "merger");
|
|
63071
|
+
const syntheticResult = {
|
|
63072
|
+
command: "",
|
|
63073
|
+
exitCode: 0,
|
|
63074
|
+
stdout: "",
|
|
63075
|
+
stderr: "",
|
|
63076
|
+
success: true,
|
|
63077
|
+
cached: true
|
|
63078
|
+
};
|
|
63079
|
+
if (hasTestCommand) result.testResult = { ...syntheticResult, command: effectiveTestCommand };
|
|
63080
|
+
if (hasBuildCommand) result.buildResult = { ...syntheticResult, command: effectiveBuildCommand };
|
|
63081
|
+
return result;
|
|
63082
|
+
}
|
|
63083
|
+
}
|
|
62707
63084
|
const testSourceLabel = testSource === "inferred" ? " [inferred]" : "";
|
|
62708
63085
|
const buildSourceLabel = buildSource === "inferred" ? " [inferred]" : "";
|
|
62709
63086
|
mergerLog.log(
|
|
@@ -62777,13 +63154,23 @@ async function runDeterministicVerification(store, rootDir, taskId, testCommand,
|
|
|
62777
63154
|
mergerLog.log(`${taskId}: deterministic verification passed`);
|
|
62778
63155
|
await store.logEntry(taskId, "Deterministic merge verification passed");
|
|
62779
63156
|
await store.appendAgentLog(taskId, "Deterministic merge verification passed", "text", void 0, "merger");
|
|
63157
|
+
if (treeSha) {
|
|
63158
|
+
try {
|
|
63159
|
+
store.recordVerificationCachePass(treeSha, effectiveTestCommand, effectiveBuildCommand, taskId);
|
|
63160
|
+
mergerLog.log(`${taskId}: Recorded verification pass for tree ${treeSha.slice(0, 7)}`);
|
|
63161
|
+
await store.logEntry(taskId, `Recorded verification pass for tree ${treeSha.slice(0, 7)}`);
|
|
63162
|
+
} catch (err) {
|
|
63163
|
+
mergerLog.warn(`${taskId}: could not record verification cache pass: ${String(err)}`);
|
|
63164
|
+
}
|
|
63165
|
+
}
|
|
62780
63166
|
return result;
|
|
62781
63167
|
}
|
|
62782
63168
|
async function runVerificationCommand2(store, rootDir, taskId, command, type, signal) {
|
|
62783
63169
|
throwIfAborted(signal, taskId);
|
|
62784
|
-
return runVerificationCommand(store, rootDir, taskId, command, type, signal, mergerLog, "merger");
|
|
63170
|
+
return runVerificationCommand(store, rootDir, taskId, command, type, signal, mergerLog, "merger", VERIFICATION_EXTRA_ENV);
|
|
62785
63171
|
}
|
|
62786
|
-
async function attemptInMergeVerificationFix(store, rootDir, taskId, failureContext, settings, options, mergeRunContext, fixAttemptNumber, _testCommand, _buildCommand) {
|
|
63172
|
+
async function attemptInMergeVerificationFix(store, rootDir, taskId, failureContext, settings, options, mergeRunContext, fixAttemptNumber, _testCommand, _buildCommand, fixModifiedFiles) {
|
|
63173
|
+
const preFixSnapshot = await snapshotDirtyFiles(rootDir);
|
|
62787
63174
|
try {
|
|
62788
63175
|
mergerLog.log(`${taskId}: spawning in-merge verification fix agent`);
|
|
62789
63176
|
const logger2 = new AgentLogger({
|
|
@@ -62899,6 +63286,14 @@ ${failureContext.output.slice(0, VERIFICATION_LOG_MAX_CHARS)}
|
|
|
62899
63286
|
signal: options.signal
|
|
62900
63287
|
});
|
|
62901
63288
|
await accumulateSessionTokenUsage(store, taskId, session);
|
|
63289
|
+
const postFixSnapshot = await snapshotDirtyFiles(rootDir);
|
|
63290
|
+
if (fixModifiedFiles) {
|
|
63291
|
+
for (const p of postFixSnapshot) {
|
|
63292
|
+
if (!preFixSnapshot.has(p)) {
|
|
63293
|
+
fixModifiedFiles.add(p);
|
|
63294
|
+
}
|
|
63295
|
+
}
|
|
63296
|
+
}
|
|
62902
63297
|
await store.logEntry(
|
|
62903
63298
|
taskId,
|
|
62904
63299
|
`Re-running deterministic merge verification (attempt ${fixAttemptNumber ?? "unknown"})`
|
|
@@ -62925,6 +63320,17 @@ ${failureContext.output.slice(0, VERIFICATION_LOG_MAX_CHARS)}
|
|
|
62925
63320
|
}
|
|
62926
63321
|
} catch (err) {
|
|
62927
63322
|
rethrowIfMergeAborted(err);
|
|
63323
|
+
if (fixModifiedFiles) {
|
|
63324
|
+
try {
|
|
63325
|
+
const postFixSnapshot = await snapshotDirtyFiles(rootDir);
|
|
63326
|
+
for (const p of postFixSnapshot) {
|
|
63327
|
+
if (!preFixSnapshot.has(p)) {
|
|
63328
|
+
fixModifiedFiles.add(p);
|
|
63329
|
+
}
|
|
63330
|
+
}
|
|
63331
|
+
} catch {
|
|
63332
|
+
}
|
|
63333
|
+
}
|
|
62928
63334
|
const errorMessage = err instanceof Error ? err.message : String(err);
|
|
62929
63335
|
mergerLog.warn(`${taskId}: in-merge fix agent error: ${errorMessage}`);
|
|
62930
63336
|
await store.logEntry(taskId, "In-merge verification fix agent encountered an error", errorMessage);
|
|
@@ -63005,15 +63411,58 @@ ${trimmedDiffStat}` : ""
|
|
|
63005
63411
|
bodyArg: `-m "${escape(body)}"`
|
|
63006
63412
|
};
|
|
63007
63413
|
}
|
|
63008
|
-
async function commitOrAmendMergeWithFixes(rootDir, taskId, branch, commitLog, includeTaskId, preAttemptHeadSha, authorArg, diffStat, settings, signal, aiSummary, aiSubject) {
|
|
63414
|
+
async function commitOrAmendMergeWithFixes(rootDir, taskId, branch, commitLog, includeTaskId, preAttemptHeadSha, authorArg, diffStat, settings, signal, aiSummary, aiSubject, fixModifiedFiles = /* @__PURE__ */ new Set()) {
|
|
63009
63415
|
try {
|
|
63010
|
-
const { stdout:
|
|
63416
|
+
const { stdout: squashStagedOut } = await execAsync2("git diff --cached --name-only", {
|
|
63417
|
+
cwd: rootDir,
|
|
63418
|
+
encoding: "utf-8"
|
|
63419
|
+
});
|
|
63420
|
+
const squashStaged = new Set(squashStagedOut.split("\n").map((l) => l.trim()).filter(Boolean));
|
|
63421
|
+
const { stdout: unstagedOut } = await execAsync2("git diff --name-only", {
|
|
63422
|
+
cwd: rootDir,
|
|
63423
|
+
encoding: "utf-8"
|
|
63424
|
+
});
|
|
63425
|
+
const unstaged = new Set(unstagedOut.split("\n").map((l) => l.trim()).filter(Boolean));
|
|
63426
|
+
const { stdout: porcelainOut } = await execFileAsync("git", ["status", "-z", "--porcelain"], {
|
|
63011
63427
|
cwd: rootDir,
|
|
63012
63428
|
encoding: "utf-8"
|
|
63013
63429
|
});
|
|
63014
|
-
|
|
63015
|
-
|
|
63430
|
+
const untracked = /* @__PURE__ */ new Set();
|
|
63431
|
+
for (const entry of porcelainOut.split("\0")) {
|
|
63432
|
+
if (!entry.startsWith("?? ")) continue;
|
|
63433
|
+
const p = entry.slice(3);
|
|
63434
|
+
if (p) untracked.add(p);
|
|
63435
|
+
}
|
|
63436
|
+
const unstagedToStage = [];
|
|
63437
|
+
for (const p of unstaged) {
|
|
63438
|
+
if (fixModifiedFiles.has(p)) {
|
|
63439
|
+
unstagedToStage.push(p);
|
|
63440
|
+
} else {
|
|
63441
|
+
mergerLog.warn(
|
|
63442
|
+
`${taskId}: refusing to stage unrelated working-tree change: ${p} (not part of squash or in-merge fix)`
|
|
63443
|
+
);
|
|
63444
|
+
}
|
|
63445
|
+
}
|
|
63446
|
+
if (unstagedToStage.length > 0) {
|
|
63447
|
+
await execFileAsync("git", ["add", "--", ...unstagedToStage], { cwd: rootDir });
|
|
63448
|
+
}
|
|
63449
|
+
const untrackedToStage = [];
|
|
63450
|
+
for (const p of untracked) {
|
|
63451
|
+
if (fixModifiedFiles.has(p)) {
|
|
63452
|
+
untrackedToStage.push(p);
|
|
63453
|
+
} else {
|
|
63454
|
+
mergerLog.warn(
|
|
63455
|
+
`${taskId}: refusing to stage unrelated working-tree change: ${p} (not part of squash or in-merge fix)`
|
|
63456
|
+
);
|
|
63457
|
+
}
|
|
63016
63458
|
}
|
|
63459
|
+
if (untrackedToStage.length > 0) {
|
|
63460
|
+
await execFileAsync("git", ["add", "--", ...untrackedToStage], { cwd: rootDir });
|
|
63461
|
+
}
|
|
63462
|
+
const cap = (arr, n = 20) => arr.length <= n ? arr.join(", ") : `${arr.slice(0, n).join(", ")} ... (+${arr.length - n} more)`;
|
|
63463
|
+
mergerLog.log(
|
|
63464
|
+
`${taskId}: staging allowlist \u2014 squash: [${cap([...squashStaged])}], fixModified: [${cap([...fixModifiedFiles])}]`
|
|
63465
|
+
);
|
|
63017
63466
|
const { stdout: staged } = await execAsync2("git diff --cached --raw", {
|
|
63018
63467
|
cwd: rootDir,
|
|
63019
63468
|
encoding: "utf-8"
|
|
@@ -63281,8 +63730,8 @@ async function classifyConflict(filePath, cwd) {
|
|
|
63281
63730
|
}
|
|
63282
63731
|
async function resolveWithOurs(filePath, cwd) {
|
|
63283
63732
|
try {
|
|
63284
|
-
await
|
|
63285
|
-
await
|
|
63733
|
+
await execFileAsync("git", ["checkout", "--ours", "--", filePath], { cwd });
|
|
63734
|
+
await execFileAsync("git", ["add", "--", filePath], { cwd });
|
|
63286
63735
|
mergerLog.log(`Auto-resolved ${filePath} using --ours`);
|
|
63287
63736
|
} catch (error) {
|
|
63288
63737
|
throw new Error(`Failed to auto-resolve ${filePath} with ours: ${error}`);
|
|
@@ -63290,8 +63739,8 @@ async function resolveWithOurs(filePath, cwd) {
|
|
|
63290
63739
|
}
|
|
63291
63740
|
async function resolveWithTheirs(filePath, cwd) {
|
|
63292
63741
|
try {
|
|
63293
|
-
await
|
|
63294
|
-
await
|
|
63742
|
+
await execFileAsync("git", ["checkout", "--theirs", "--", filePath], { cwd });
|
|
63743
|
+
await execFileAsync("git", ["add", "--", filePath], { cwd });
|
|
63295
63744
|
mergerLog.log(`Auto-resolved ${filePath} using --theirs`);
|
|
63296
63745
|
} catch (error) {
|
|
63297
63746
|
throw new Error(`Failed to auto-resolve ${filePath} with theirs: ${error}`);
|
|
@@ -63299,7 +63748,7 @@ async function resolveWithTheirs(filePath, cwd) {
|
|
|
63299
63748
|
}
|
|
63300
63749
|
async function resolveTrivialWhitespace(filePath, cwd) {
|
|
63301
63750
|
try {
|
|
63302
|
-
await
|
|
63751
|
+
await execFileAsync("git", ["add", "--", filePath], { cwd });
|
|
63303
63752
|
mergerLog.log(`Auto-resolved ${filePath} (trivial whitespace)`);
|
|
63304
63753
|
} catch (error) {
|
|
63305
63754
|
throw new Error(`Failed to auto-resolve ${filePath} trivial conflict: ${error}`);
|
|
@@ -63607,7 +64056,7 @@ function parsePushRemoteTarget(rootDir, pushRemote) {
|
|
|
63607
64056
|
const remote = remoteToken || "origin";
|
|
63608
64057
|
let branch = branchTokens.join(" ").trim();
|
|
63609
64058
|
if (!branch) {
|
|
63610
|
-
branch =
|
|
64059
|
+
branch = execSyncText("git symbolic-ref --short HEAD", {
|
|
63611
64060
|
cwd: rootDir,
|
|
63612
64061
|
encoding: "utf-8",
|
|
63613
64062
|
stdio: "pipe"
|
|
@@ -63915,7 +64364,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
|
|
|
63915
64364
|
} catch {
|
|
63916
64365
|
result.error = `Branch '${branch}' not found \u2014 moving to done without merge`;
|
|
63917
64366
|
try {
|
|
63918
|
-
const commitSha =
|
|
64367
|
+
const commitSha = execSyncText("git rev-parse HEAD", {
|
|
63919
64368
|
cwd: rootDir,
|
|
63920
64369
|
stdio: "pipe",
|
|
63921
64370
|
encoding: "utf-8"
|
|
@@ -63938,12 +64387,12 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
|
|
|
63938
64387
|
}
|
|
63939
64388
|
try {
|
|
63940
64389
|
throwIfAborted(options.signal, taskId);
|
|
63941
|
-
const currentBranch =
|
|
64390
|
+
const currentBranch = execSyncText("git symbolic-ref --short HEAD", {
|
|
63942
64391
|
cwd: rootDir,
|
|
63943
64392
|
encoding: "utf-8",
|
|
63944
64393
|
stdio: "pipe"
|
|
63945
64394
|
}).trim();
|
|
63946
|
-
const mainBranch =
|
|
64395
|
+
const mainBranch = execSyncText("git rev-parse --abbrev-ref origin/HEAD", {
|
|
63947
64396
|
cwd: rootDir,
|
|
63948
64397
|
encoding: "utf-8",
|
|
63949
64398
|
stdio: "pipe"
|
|
@@ -64432,6 +64881,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
|
|
|
64432
64881
|
const failedType = verificationErr.verificationResult.testResult?.success === false ? "test" : "build";
|
|
64433
64882
|
if (failedResult) {
|
|
64434
64883
|
let fixSuccess = false;
|
|
64884
|
+
const verificationFixModifiedFiles = /* @__PURE__ */ new Set();
|
|
64435
64885
|
for (let fixAttempt = 1; fixAttempt <= maxFixRetries; fixAttempt++) {
|
|
64436
64886
|
const fixAttemptStartedAt = Date.now();
|
|
64437
64887
|
mergerLog.log(`${taskId}: in-merge verification fix attempt ${fixAttempt}/${maxFixRetries}`);
|
|
@@ -64459,7 +64909,8 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
|
|
|
64459
64909
|
{ runId: mergeRunId, agentId: engineRunContext.agentId },
|
|
64460
64910
|
fixAttempt,
|
|
64461
64911
|
effectiveTestCommand,
|
|
64462
|
-
effectiveBuildCommand
|
|
64912
|
+
effectiveBuildCommand,
|
|
64913
|
+
verificationFixModifiedFiles
|
|
64463
64914
|
);
|
|
64464
64915
|
const fixAttemptDurationMs = Date.now() - fixAttemptStartedAt;
|
|
64465
64916
|
if (fixSuccess) {
|
|
@@ -64498,7 +64949,8 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
|
|
|
64498
64949
|
settings,
|
|
64499
64950
|
options.signal,
|
|
64500
64951
|
aiMergeSummary,
|
|
64501
|
-
aiMergeSubject
|
|
64952
|
+
aiMergeSubject,
|
|
64953
|
+
verificationFixModifiedFiles
|
|
64502
64954
|
);
|
|
64503
64955
|
if (!finalized) {
|
|
64504
64956
|
resetMergeWithWarn(rootDir, taskId, "verification-fix finalize");
|
|
@@ -64529,6 +64981,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
|
|
|
64529
64981
|
const fixCommand = effectiveBuildCommand || effectiveTestCommand;
|
|
64530
64982
|
const fixType = effectiveBuildCommand ? "build" : "test";
|
|
64531
64983
|
let fixSuccess = false;
|
|
64984
|
+
const buildFixModifiedFiles = /* @__PURE__ */ new Set();
|
|
64532
64985
|
for (let fixAttempt = 1; fixAttempt <= maxFixRetries; fixAttempt++) {
|
|
64533
64986
|
const fixAttemptStartedAt = Date.now();
|
|
64534
64987
|
mergerLog.log(`${taskId}: in-merge verification fix attempt ${fixAttempt}/${maxFixRetries}`);
|
|
@@ -64556,7 +65009,8 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
|
|
|
64556
65009
|
{ runId: mergeRunId, agentId: engineRunContext.agentId },
|
|
64557
65010
|
fixAttempt,
|
|
64558
65011
|
effectiveTestCommand,
|
|
64559
|
-
effectiveBuildCommand
|
|
65012
|
+
effectiveBuildCommand,
|
|
65013
|
+
buildFixModifiedFiles
|
|
64560
65014
|
);
|
|
64561
65015
|
const fixAttemptDurationMs = Date.now() - fixAttemptStartedAt;
|
|
64562
65016
|
if (fixSuccess) {
|
|
@@ -64594,7 +65048,8 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
|
|
|
64594
65048
|
settings,
|
|
64595
65049
|
options.signal,
|
|
64596
65050
|
aiMergeSummary,
|
|
64597
|
-
aiMergeSubject
|
|
65051
|
+
aiMergeSubject,
|
|
65052
|
+
buildFixModifiedFiles
|
|
64598
65053
|
);
|
|
64599
65054
|
if (!finalized) {
|
|
64600
65055
|
resetMergeWithWarn(rootDir, taskId, "build-verification fix finalize");
|
|
@@ -64675,7 +65130,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
|
|
|
64675
65130
|
throw new Error(`AI merge failed for ${taskId}: all 3 attempts exhausted`);
|
|
64676
65131
|
}
|
|
64677
65132
|
try {
|
|
64678
|
-
const commitSha =
|
|
65133
|
+
const commitSha = execSyncText("git rev-parse HEAD", {
|
|
64679
65134
|
cwd: rootDir,
|
|
64680
65135
|
stdio: "pipe",
|
|
64681
65136
|
encoding: "utf-8"
|
|
@@ -64891,7 +65346,7 @@ async function aiMergeTask(store, rootDir, taskId, options = {}) {
|
|
|
64891
65346
|
async function tryFastForwardFromOrigin(rootDir, taskId) {
|
|
64892
65347
|
let currentBranch;
|
|
64893
65348
|
try {
|
|
64894
|
-
currentBranch =
|
|
65349
|
+
currentBranch = execSyncText("git rev-parse --abbrev-ref HEAD", {
|
|
64895
65350
|
cwd: rootDir,
|
|
64896
65351
|
encoding: "utf-8",
|
|
64897
65352
|
stdio: "pipe"
|
|
@@ -64909,7 +65364,7 @@ async function tryFastForwardFromOrigin(rootDir, taskId) {
|
|
|
64909
65364
|
let behind = 0;
|
|
64910
65365
|
let ahead = 0;
|
|
64911
65366
|
try {
|
|
64912
|
-
const counts =
|
|
65367
|
+
const counts = execSyncText(`git rev-list --left-right --count "origin/${currentBranch}...HEAD"`, {
|
|
64913
65368
|
cwd: rootDir,
|
|
64914
65369
|
encoding: "utf-8",
|
|
64915
65370
|
stdio: "pipe"
|
|
@@ -65048,7 +65503,7 @@ async function executeMergeAttempt(params, aiTracker) {
|
|
|
65048
65503
|
}
|
|
65049
65504
|
}
|
|
65050
65505
|
if (complex.length === 0) {
|
|
65051
|
-
const staged =
|
|
65506
|
+
const staged = execSyncText("git diff --cached --quiet 2>&1; echo $?", {
|
|
65052
65507
|
cwd: rootDir,
|
|
65053
65508
|
encoding: "utf-8"
|
|
65054
65509
|
}).trim();
|
|
@@ -65149,7 +65604,7 @@ async function executeMergeAttempt(params, aiTracker) {
|
|
|
65149
65604
|
}
|
|
65150
65605
|
return true;
|
|
65151
65606
|
}
|
|
65152
|
-
const conflictedOutput =
|
|
65607
|
+
const conflictedOutput = execSyncText("git diff --name-only --diff-filter=U", {
|
|
65153
65608
|
cwd: rootDir,
|
|
65154
65609
|
encoding: "utf-8"
|
|
65155
65610
|
}).trim();
|
|
@@ -65270,7 +65725,7 @@ async function attemptWithSideStrategy(params, side = "theirs", aiTracker) {
|
|
|
65270
65725
|
await execAsync2(`git merge -X ${side} --squash "${branch}"`, {
|
|
65271
65726
|
cwd: rootDir
|
|
65272
65727
|
});
|
|
65273
|
-
const conflictedOutput =
|
|
65728
|
+
const conflictedOutput = execSyncText("git diff --name-only --diff-filter=U", {
|
|
65274
65729
|
cwd: rootDir,
|
|
65275
65730
|
encoding: "utf-8"
|
|
65276
65731
|
}).trim();
|
|
@@ -65278,7 +65733,7 @@ async function attemptWithSideStrategy(params, side = "theirs", aiTracker) {
|
|
|
65278
65733
|
mergerLog.warn(`${taskId}: -X ${side} left unresolved conflicts: ${conflictedOutput}`);
|
|
65279
65734
|
return false;
|
|
65280
65735
|
}
|
|
65281
|
-
const staged =
|
|
65736
|
+
const staged = execSyncText("git diff --cached --quiet 2>&1; echo $?", {
|
|
65282
65737
|
cwd: rootDir,
|
|
65283
65738
|
encoding: "utf-8"
|
|
65284
65739
|
}).trim();
|
|
@@ -65540,7 +65995,7 @@ async function runAiAgentForCommit(params) {
|
|
|
65540
65995
|
mergerLog.error(`Build verification failed for ${taskId}: ${buildErrorMessage}`);
|
|
65541
65996
|
return { success: false, error: buildErrorMessage };
|
|
65542
65997
|
}
|
|
65543
|
-
const staged =
|
|
65998
|
+
const staged = execSyncText("git diff --cached --quiet 2>&1; echo $?", {
|
|
65544
65999
|
cwd: rootDir,
|
|
65545
66000
|
encoding: "utf-8"
|
|
65546
66001
|
}).trim();
|
|
@@ -65935,7 +66390,7 @@ async function completeTask(store, taskId, result) {
|
|
|
65935
66390
|
result.task = task;
|
|
65936
66391
|
store.emit("task:merged", result);
|
|
65937
66392
|
}
|
|
65938
|
-
var execAsync2, LOCKFILE_PATTERNS, GENERATED_PATTERNS, DEPENDENCY_SYNC_TRIGGER_PATTERNS, WORKFLOW_SCRIPT_OUTPUT_MAX_CHARS, PULL_REBASE_TIMEOUT_MS, PUSH_TIMEOUT_MS, MERGE_COMMIT_LOG_MAX_CHARS, MERGE_DIFF_STAT_MAX_CHARS, VerificationError, MergeAbortedError, FUSION_TASK_ID_TRAILER_KEY;
|
|
66393
|
+
var execAsync2, execFileAsync, LOCKFILE_PATTERNS, GENERATED_PATTERNS, DEPENDENCY_SYNC_TRIGGER_PATTERNS, WORKFLOW_SCRIPT_OUTPUT_MAX_CHARS, PULL_REBASE_TIMEOUT_MS, PUSH_TIMEOUT_MS, MERGE_COMMIT_LOG_MAX_CHARS, MERGE_DIFF_STAT_MAX_CHARS, VerificationError, MergeAbortedError, VERIFICATION_EXTRA_ENV, FUSION_TASK_ID_TRAILER_KEY;
|
|
65939
66394
|
var init_merger = __esm({
|
|
65940
66395
|
"../engine/src/merger.ts"() {
|
|
65941
66396
|
"use strict";
|
|
@@ -65955,6 +66410,7 @@ var init_merger = __esm({
|
|
|
65955
66410
|
init_agent_instructions();
|
|
65956
66411
|
init_run_audit();
|
|
65957
66412
|
execAsync2 = promisify4(exec3);
|
|
66413
|
+
execFileAsync = promisify4(execFile3);
|
|
65958
66414
|
LOCKFILE_PATTERNS = [
|
|
65959
66415
|
"package-lock.json",
|
|
65960
66416
|
"pnpm-lock.yaml",
|
|
@@ -66009,6 +66465,13 @@ var init_merger = __esm({
|
|
|
66009
66465
|
this.name = "MergeAbortedError";
|
|
66010
66466
|
}
|
|
66011
66467
|
};
|
|
66468
|
+
VERIFICATION_EXTRA_ENV = Object.fromEntries(
|
|
66469
|
+
[
|
|
66470
|
+
["FUSION_TEST_TOTAL_WORKERS", "8"],
|
|
66471
|
+
["FUSION_TEST_CONCURRENCY", "4"],
|
|
66472
|
+
["FUSION_TEST_WORKSPACE_CONCURRENCY", "4"]
|
|
66473
|
+
].filter(([key]) => !(key in process.env))
|
|
66474
|
+
);
|
|
66012
66475
|
FUSION_TASK_ID_TRAILER_KEY = "Fusion-Task-Id";
|
|
66013
66476
|
}
|
|
66014
66477
|
});
|
|
@@ -70136,7 +70599,11 @@ ${summary}`,
|
|
|
70136
70599
|
// Agent delegation tools — discover and delegate work to other agents.
|
|
70137
70600
|
...this.options.agentStore ? [
|
|
70138
70601
|
createListAgentsTool(this.options.agentStore),
|
|
70139
|
-
createDelegateTaskTool(this.options.agentStore, this.store, { rootDir: this.rootDir })
|
|
70602
|
+
createDelegateTaskTool(this.options.agentStore, this.store, { rootDir: this.rootDir }),
|
|
70603
|
+
...assignedAgentId ? [
|
|
70604
|
+
createGetAgentConfigTool(this.options.agentStore, assignedAgentId),
|
|
70605
|
+
createUpdateAgentConfigTool(this.options.agentStore, assignedAgentId)
|
|
70606
|
+
] : []
|
|
70140
70607
|
] : [],
|
|
70141
70608
|
// Messaging tools — allows executor agents to send and receive messages.
|
|
70142
70609
|
...this.options.messageStore && assignedAgentId ? [
|
|
@@ -76142,6 +76609,7 @@ Your job:
|
|
|
76142
76609
|
2. Do ONE useful action that changes project clarity or flow.
|
|
76143
76610
|
3. Use fn_task_create to spawn follow-up work, fn_task_log to record observations, and fn_task_document_write for durable artifacts.
|
|
76144
76611
|
4. Use fn_list_agents + fn_delegate_task when work should be assigned to a specific capable agent now.
|
|
76612
|
+
5. Use fn_get_agent_config and fn_update_agent_config to tune direct reports before delegating recurring work.
|
|
76145
76613
|
5. Call fn_heartbeat_done when finished with an optional summary of what was accomplished.
|
|
76146
76614
|
|
|
76147
76615
|
Examples of ONE useful action:
|
|
@@ -76165,6 +76633,7 @@ Use this decision rule:
|
|
|
76165
76633
|
- **Task document (fn_task_document_write):** when findings are structured and likely useful across future sessions for the same task.
|
|
76166
76634
|
- **Create task (fn_task_create):** when someone must do new executable work.
|
|
76167
76635
|
- **Delegate task (fn_delegate_task):** when that new work should go to a specific agent based on role/availability.
|
|
76636
|
+
- **Manage report config (fn_get_agent_config / fn_update_agent_config):** when direct reports need heartbeat, instruction, or personality tuning.
|
|
76168
76637
|
|
|
76169
76638
|
Prefer fn_task_create when assignment is unclear and scheduler routing is fine.
|
|
76170
76639
|
Prefer fn_delegate_task when immediate ownership by a specific agent materially reduces latency or risk.
|
|
@@ -76218,6 +76687,7 @@ Your job:
|
|
|
76218
76687
|
2. Do ONE useful action: analyze, create follow-up tasks, delegate work, or update memory.
|
|
76219
76688
|
3. Use fn_task_create to spawn follow-up work.
|
|
76220
76689
|
4. Use fn_list_agents and fn_delegate_task to coordinate with other agents.
|
|
76690
|
+
5. Use fn_get_agent_config and fn_update_agent_config to read/tune direct-report agents for better routing outcomes.
|
|
76221
76691
|
5. Call fn_heartbeat_done when finished with an optional summary of what was accomplished.
|
|
76222
76692
|
|
|
76223
76693
|
Examples of ONE useful action:
|
|
@@ -76231,6 +76701,7 @@ Keep work lightweight \u2014 this is a single-pass ambient check, not a full imp
|
|
|
76231
76701
|
You have readonly file access plus:
|
|
76232
76702
|
- fn_task_create
|
|
76233
76703
|
- fn_list_agents and fn_delegate_task
|
|
76704
|
+
- fn_get_agent_config and fn_update_agent_config (for direct reports only)
|
|
76234
76705
|
- fn_memory_search, fn_memory_get, and fn_memory_append
|
|
76235
76706
|
- fn_heartbeat_done
|
|
76236
76707
|
- fn_send_message and fn_read_messages when messaging is enabled for this run (they may not always be available)
|
|
@@ -77072,6 +77543,8 @@ not loop on the same plan across heartbeats without recording why.`;
|
|
|
77072
77543
|
}, { rootDir: this.rootDir }));
|
|
77073
77544
|
heartbeatTools.push(createListAgentsTool(this.store));
|
|
77074
77545
|
heartbeatTools.push(createDelegateTaskTool(this.store, taskStore, { rootDir: this.rootDir }));
|
|
77546
|
+
heartbeatTools.push(createGetAgentConfigTool(this.store, agentId));
|
|
77547
|
+
heartbeatTools.push(createUpdateAgentConfigTool(this.store, agentId));
|
|
77075
77548
|
if (this.messageStore) {
|
|
77076
77549
|
heartbeatTools.push(createSendMessageTool(this.messageStore, agentId));
|
|
77077
77550
|
heartbeatTools.push(createReadMessagesTool(this.messageStore, agentId));
|
|
@@ -77493,6 +77966,8 @@ ${taskDetail.prompt}` : "No PROMPT.md available.",
|
|
|
77493
77966
|
tools.push(createTaskDocumentReadTool(taskStore, taskId));
|
|
77494
77967
|
tools.push(createListAgentsTool(this.store));
|
|
77495
77968
|
tools.push(createDelegateTaskTool(this.store, taskStore, { rootDir: this.rootDir }));
|
|
77969
|
+
tools.push(createGetAgentConfigTool(this.store, agentId));
|
|
77970
|
+
tools.push(createUpdateAgentConfigTool(this.store, agentId));
|
|
77496
77971
|
if (messageStore) {
|
|
77497
77972
|
tools.push(createSendMessageTool(messageStore, agentId));
|
|
77498
77973
|
tools.push(createReadMessagesTool(messageStore, agentId));
|
|
@@ -82343,6 +82818,11 @@ var init_in_process_runtime = __esm({
|
|
|
82343
82818
|
this.recordActivity();
|
|
82344
82819
|
runtimeLog.log(`Started executing task ${task.id} in ${worktreePath}`);
|
|
82345
82820
|
if (this.agentStore) {
|
|
82821
|
+
if (this.taskAgentMap.has(task.id)) {
|
|
82822
|
+
runtimeLog.warn(`Skipping task-worker creation for ${task.id}: agent already exists (${this.taskAgentMap.get(task.id)})`);
|
|
82823
|
+
return;
|
|
82824
|
+
}
|
|
82825
|
+
this.taskAgentMap.set(task.id, "creating");
|
|
82346
82826
|
this.agentStore.createAgent({
|
|
82347
82827
|
name: `executor-${task.id}`,
|
|
82348
82828
|
role: "executor",
|
|
@@ -82360,6 +82840,7 @@ var init_in_process_runtime = __esm({
|
|
|
82360
82840
|
await this.agentStore.updateAgentState(agent.id, "active");
|
|
82361
82841
|
await this.agentStore.updateAgentState(agent.id, "running");
|
|
82362
82842
|
}).catch((err) => {
|
|
82843
|
+
this.taskAgentMap.delete(task.id);
|
|
82363
82844
|
runtimeLog.warn(`Failed to create agent for task ${task.id}:`, err);
|
|
82364
82845
|
});
|
|
82365
82846
|
}
|
|
@@ -84876,7 +85357,7 @@ var init_provider_adapters = __esm({
|
|
|
84876
85357
|
|
|
84877
85358
|
// ../engine/src/remote-access/tunnel-process-manager.ts
|
|
84878
85359
|
import { EventEmitter as EventEmitter23 } from "node:events";
|
|
84879
|
-
import { exec as exec10, execFile as
|
|
85360
|
+
import { exec as exec10, execFile as execFile4, spawn as spawn5 } from "node:child_process";
|
|
84880
85361
|
import { promisify as promisify10 } from "node:util";
|
|
84881
85362
|
function nowIso() {
|
|
84882
85363
|
return (/* @__PURE__ */ new Date()).toISOString();
|
|
@@ -84917,7 +85398,7 @@ function toStateError(code, err) {
|
|
|
84917
85398
|
at: nowIso()
|
|
84918
85399
|
};
|
|
84919
85400
|
}
|
|
84920
|
-
var DEFAULT_MAX_LOG_ENTRIES, DEFAULT_STOP_TIMEOUT_MS2,
|
|
85401
|
+
var DEFAULT_MAX_LOG_ENTRIES, DEFAULT_STOP_TIMEOUT_MS2, execFileAsync2, execAsync8, LineBuffer, TunnelProcessManager;
|
|
84921
85402
|
var init_tunnel_process_manager = __esm({
|
|
84922
85403
|
"../engine/src/remote-access/tunnel-process-manager.ts"() {
|
|
84923
85404
|
"use strict";
|
|
@@ -84925,7 +85406,7 @@ var init_tunnel_process_manager = __esm({
|
|
|
84925
85406
|
init_provider_adapters();
|
|
84926
85407
|
DEFAULT_MAX_LOG_ENTRIES = 400;
|
|
84927
85408
|
DEFAULT_STOP_TIMEOUT_MS2 = 5e3;
|
|
84928
|
-
|
|
85409
|
+
execFileAsync2 = promisify10(execFile4);
|
|
84929
85410
|
execAsync8 = promisify10(exec10);
|
|
84930
85411
|
LineBuffer = class {
|
|
84931
85412
|
pending = "";
|
|
@@ -85003,7 +85484,7 @@ var init_tunnel_process_manager = __esm({
|
|
|
85003
85484
|
return null;
|
|
85004
85485
|
}
|
|
85005
85486
|
try {
|
|
85006
|
-
const { stdout } = await
|
|
85487
|
+
const { stdout } = await execFileAsync2("tailscale", ["status", "--json"], { timeout: 3e3 });
|
|
85007
85488
|
const data = JSON.parse(String(stdout));
|
|
85008
85489
|
const dnsName = data.Self?.DNSName?.replace(/\.$/, "");
|
|
85009
85490
|
if (!dnsName) {
|
|
@@ -85026,7 +85507,7 @@ var init_tunnel_process_manager = __esm({
|
|
|
85026
85507
|
];
|
|
85027
85508
|
for (const resetCommand of resetCommands) {
|
|
85028
85509
|
try {
|
|
85029
|
-
await
|
|
85510
|
+
await execFileAsync2(resetCommand.command, resetCommand.args, { timeout: 5e3 });
|
|
85030
85511
|
return;
|
|
85031
85512
|
} catch {
|
|
85032
85513
|
}
|
|
@@ -85325,7 +85806,7 @@ var init_tunnel_process_manager = __esm({
|
|
|
85325
85806
|
});
|
|
85326
85807
|
|
|
85327
85808
|
// ../engine/src/project-engine.ts
|
|
85328
|
-
import { execFile as
|
|
85809
|
+
import { execFile as execFile5 } from "node:child_process";
|
|
85329
85810
|
import { promisify as promisify11 } from "node:util";
|
|
85330
85811
|
function formatErrorDetails(error) {
|
|
85331
85812
|
if (error instanceof Error) {
|
|
@@ -85337,7 +85818,7 @@ function formatErrorDetails(error) {
|
|
|
85337
85818
|
const detail = String(error);
|
|
85338
85819
|
return { message: detail, detail };
|
|
85339
85820
|
}
|
|
85340
|
-
var
|
|
85821
|
+
var execFileAsync3, MERGE_HANDOFF_GRACE_MS, isRemoteActive, ProjectEngine;
|
|
85341
85822
|
var init_project_engine = __esm({
|
|
85342
85823
|
"../engine/src/project-engine.ts"() {
|
|
85343
85824
|
"use strict";
|
|
@@ -85355,7 +85836,7 @@ var init_project_engine = __esm({
|
|
|
85355
85836
|
init_research_orchestrator();
|
|
85356
85837
|
init_research_step_runner();
|
|
85357
85838
|
init_tunnel_process_manager();
|
|
85358
|
-
|
|
85839
|
+
execFileAsync3 = promisify11(execFile5);
|
|
85359
85840
|
MERGE_HANDOFF_GRACE_MS = 300;
|
|
85360
85841
|
isRemoteActive = (ra) => ra?.activeProvider != null && (ra.providers[ra.activeProvider]?.enabled ?? false);
|
|
85361
85842
|
ProjectEngine = class _ProjectEngine {
|
|
@@ -85996,7 +86477,7 @@ ${detail}`
|
|
|
85996
86477
|
async checkExecutableAvailable(command) {
|
|
85997
86478
|
const checker = process.platform === "win32" ? "where" : "which";
|
|
85998
86479
|
try {
|
|
85999
|
-
await
|
|
86480
|
+
await execFileAsync3(checker, [command]);
|
|
86000
86481
|
return { available: true };
|
|
86001
86482
|
} catch {
|
|
86002
86483
|
return {
|
|
@@ -86087,7 +86568,11 @@ ${detail}`
|
|
|
86087
86568
|
if (this.mergeActive.has(taskId)) return;
|
|
86088
86569
|
this.mergeActive.add(taskId);
|
|
86089
86570
|
this.mergeQueue.push(taskId);
|
|
86090
|
-
void this.drainMergeQueue()
|
|
86571
|
+
void this.drainMergeQueue().catch((err) => {
|
|
86572
|
+
runtimeLog.error(
|
|
86573
|
+
`Merge queue drain failed unexpectedly: ${err instanceof Error ? err.message : String(err)}`
|
|
86574
|
+
);
|
|
86575
|
+
});
|
|
86091
86576
|
}
|
|
86092
86577
|
/**
|
|
86093
86578
|
* Filter a sweep's listTasks() result to merge-eligible tasks, sort by
|
|
@@ -86609,15 +87094,19 @@ ${detail}`
|
|
|
86609
87094
|
runtimeLog.warn(
|
|
86610
87095
|
`Auto-merge periodic sweep failed: ${err instanceof Error ? err.message : String(err)}`
|
|
86611
87096
|
);
|
|
86612
|
-
}
|
|
86613
|
-
|
|
86614
|
-
|
|
86615
|
-
|
|
86616
|
-
|
|
86617
|
-
|
|
86618
|
-
|
|
86619
|
-
|
|
86620
|
-
|
|
87097
|
+
} finally {
|
|
87098
|
+
if (!this.shuttingDown) {
|
|
87099
|
+
let interval = 15e3;
|
|
87100
|
+
try {
|
|
87101
|
+
const settings = await store.getSettings();
|
|
87102
|
+
interval = settings.pollIntervalMs ?? 15e3;
|
|
87103
|
+
} catch (err) {
|
|
87104
|
+
runtimeLog.warn(
|
|
87105
|
+
`Auto-merge retry: failed to read pollIntervalMs, using default 15s: ${err instanceof Error ? err.message : String(err)}`
|
|
87106
|
+
);
|
|
87107
|
+
}
|
|
87108
|
+
this.mergeRetryTimer = setTimeout(() => void schedule(), interval);
|
|
87109
|
+
}
|
|
86621
87110
|
}
|
|
86622
87111
|
};
|
|
86623
87112
|
this.mergeRetryTimer = setTimeout(() => void schedule(), 15e3);
|
|
@@ -89494,7 +89983,6 @@ function definePlugin(plugin4) {
|
|
|
89494
89983
|
var init_src3 = __esm({
|
|
89495
89984
|
"../plugin-sdk/src/index.ts"() {
|
|
89496
89985
|
"use strict";
|
|
89497
|
-
init_src();
|
|
89498
89986
|
}
|
|
89499
89987
|
});
|
|
89500
89988
|
|
|
@@ -96687,13 +97175,13 @@ var init_github_poll = __esm({
|
|
|
96687
97175
|
});
|
|
96688
97176
|
|
|
96689
97177
|
// ../dashboard/src/routes/resolve-diff-base.ts
|
|
96690
|
-
import { execFile as
|
|
97178
|
+
import { execFile as execFile6 } from "node:child_process";
|
|
96691
97179
|
import { promisify as promisify12 } from "node:util";
|
|
96692
|
-
var
|
|
97180
|
+
var execFileAsync4;
|
|
96693
97181
|
var init_resolve_diff_base = __esm({
|
|
96694
97182
|
"../dashboard/src/routes/resolve-diff-base.ts"() {
|
|
96695
97183
|
"use strict";
|
|
96696
|
-
|
|
97184
|
+
execFileAsync4 = promisify12(execFile6);
|
|
96697
97185
|
}
|
|
96698
97186
|
});
|
|
96699
97187
|
|
|
@@ -97052,13 +97540,13 @@ var init_register_agents_projects_nodes = __esm({
|
|
|
97052
97540
|
});
|
|
97053
97541
|
|
|
97054
97542
|
// ../dashboard/src/exec-file.ts
|
|
97055
|
-
import { execFile as
|
|
97543
|
+
import { execFile as execFile7 } from "node:child_process";
|
|
97056
97544
|
import { promisify as promisify13 } from "node:util";
|
|
97057
|
-
var
|
|
97545
|
+
var execFileAsync5;
|
|
97058
97546
|
var init_exec_file = __esm({
|
|
97059
97547
|
"../dashboard/src/exec-file.ts"() {
|
|
97060
97548
|
"use strict";
|
|
97061
|
-
|
|
97549
|
+
execFileAsync5 = promisify13(execFile7);
|
|
97062
97550
|
}
|
|
97063
97551
|
});
|
|
97064
97552
|
|
|
@@ -97327,9 +97815,9 @@ var init_claude_cli_probe = __esm({
|
|
|
97327
97815
|
}
|
|
97328
97816
|
});
|
|
97329
97817
|
|
|
97330
|
-
// ../../plugins/fusion-plugin-droid-runtime/
|
|
97818
|
+
// ../../plugins/fusion-plugin-droid-runtime/src/probe.ts
|
|
97331
97819
|
var init_probe3 = __esm({
|
|
97332
|
-
"../../plugins/fusion-plugin-droid-runtime/
|
|
97820
|
+
"../../plugins/fusion-plugin-droid-runtime/src/probe.ts"() {
|
|
97333
97821
|
"use strict";
|
|
97334
97822
|
}
|
|
97335
97823
|
});
|
|
@@ -97342,6 +97830,13 @@ var init_droid_cli_probe = __esm({
|
|
|
97342
97830
|
}
|
|
97343
97831
|
});
|
|
97344
97832
|
|
|
97833
|
+
// ../dashboard/src/llama-cpp-probe.ts
|
|
97834
|
+
var init_llama_cpp_probe = __esm({
|
|
97835
|
+
"../dashboard/src/llama-cpp-probe.ts"() {
|
|
97836
|
+
"use strict";
|
|
97837
|
+
}
|
|
97838
|
+
});
|
|
97839
|
+
|
|
97345
97840
|
// ../dashboard/src/routes/register-auth-routes.ts
|
|
97346
97841
|
var init_register_auth_routes = __esm({
|
|
97347
97842
|
"../dashboard/src/routes/register-auth-routes.ts"() {
|
|
@@ -97349,6 +97844,7 @@ var init_register_auth_routes = __esm({
|
|
|
97349
97844
|
init_src();
|
|
97350
97845
|
init_claude_cli_probe();
|
|
97351
97846
|
init_droid_cli_probe();
|
|
97847
|
+
init_llama_cpp_probe();
|
|
97352
97848
|
init_api_error();
|
|
97353
97849
|
init_usage();
|
|
97354
97850
|
init_project_store_resolver();
|