episoda 0.2.35 → 0.2.37
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/daemon/daemon-process.js +213 -504
- package/dist/daemon/daemon-process.js.map +1 -1
- package/dist/index.js +22 -0
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
@@ -397,6 +397,9 @@ var require_git_executor = __commonJS({
 return await this.executeWorktreeList(cwd, options);
 case "worktree_prune":
 return await this.executeWorktreePrune(cwd, options);
+// EP1002: Worktree setup command for unified orchestration
+case "worktree_setup":
+return await this.executeWorktreeSetup(command, cwd, options);
 case "clone_bare":
 return await this.executeCloneBare(command, options);
 case "project_info":
@@ -1710,6 +1713,10 @@ var require_git_executor = __commonJS({
 args.push(command.path);
 const result = await this.runGitCommand(args, cwd, options);
 if (result.success) {
+try {
+await fs17.rm(command.path, { recursive: true, force: true });
+} catch {
+}
 return {
 success: true,
 output: `Removed worktree at ${command.path}`,
@@ -1823,6 +1830,18 @@ var require_git_executor = __commonJS({
 };
 }
 }
+/**
+* EP1002: Worktree setup command stub
+* The actual implementation is in the daemon which intercepts this command.
+* This stub exists for type checking and as a fallback.
+*/
+async executeWorktreeSetup(command, cwd, options) {
+return {
+success: false,
+error: "NOT_IMPLEMENTED",
+output: "worktree_setup must be handled by the daemon, not GitExecutor"
+};
+}
 /**
 * EP944: Clone a repository as a bare repository
 * Used for worktree-based development setup
@@ -2593,6 +2612,9 @@ var require_errors = __commonJS({
 "WORKTREE_NOT_FOUND": "Worktree not found at this path",
 "WORKTREE_LOCKED": "Worktree is locked",
 "BRANCH_IN_USE": "Branch is already checked out in another worktree",
+// EP1002: Worktree setup error messages
+"NOT_IMPLEMENTED": "Command not implemented in this context",
+"SETUP_FAILED": "Worktree setup failed",
 "UNKNOWN_ERROR": "Unknown error occurred"
 };
 let message = messages[code] || `Error: ${code}`;
@@ -2699,7 +2721,7 @@ var require_package = __commonJS({
 "package.json"(exports2, module2) {
 module2.exports = {
 name: "episoda",
-version: "0.2.
+version: "0.2.36",
 description: "CLI tool for Episoda local development workflow orchestration",
 main: "dist/index.js",
 types: "dist/index.d.ts",
@@ -6242,52 +6264,9 @@ async function getWorktreeInfoForModule(moduleUid) {
 }
 return getWorktreeInfo(moduleUid, config.workspace_slug, config.project_slug);
 }
-async function getProjectRootPath() {
-const config = await (0, import_core9.loadConfig)();
-if (!config?.workspace_slug || !config?.project_slug) {
-return null;
-}
-return path15.join(
-getEpisodaRoot2(),
-config.workspace_slug,
-config.project_slug
-);
-}
 
 // src/utils/port-allocator.ts
-var PORT_RANGE_START = 3100;
-var PORT_RANGE_END = 3199;
-var PORT_WARNING_THRESHOLD = 80;
 var portAssignments = /* @__PURE__ */ new Map();
-function allocatePort(moduleUid) {
-const existing = portAssignments.get(moduleUid);
-if (existing) {
-return existing;
-}
-const usedPorts = new Set(portAssignments.values());
-if (usedPorts.size >= PORT_WARNING_THRESHOLD) {
-console.warn(
-`[PortAllocator] Warning: ${usedPorts.size}/${PORT_RANGE_END - PORT_RANGE_START + 1} ports allocated`
-);
-}
-for (let port = PORT_RANGE_START; port <= PORT_RANGE_END; port++) {
-if (!usedPorts.has(port)) {
-portAssignments.set(moduleUid, port);
-console.log(`[PortAllocator] Assigned port ${port} to ${moduleUid}`);
-return port;
-}
-}
-throw new Error(
-`No available ports in range ${PORT_RANGE_START}-${PORT_RANGE_END}. ${portAssignments.size} modules are using all available ports.`
-);
-}
-function releasePort(moduleUid) {
-const port = portAssignments.get(moduleUid);
-if (port) {
-portAssignments.delete(moduleUid);
-console.log(`[PortAllocator] Released port ${port} from ${moduleUid}`);
-}
-}
 function clearAllPorts() {
 const count = portAssignments.size;
 portAssignments.clear();
@@ -6520,7 +6499,7 @@ var Daemon = class _Daemon {
 // EP837: Prevent concurrent commit syncs (backpressure guard)
 this.commitSyncInProgress = false;
 // EP843: Per-module mutex for tunnel operations
-// Prevents race conditions between
+// EP1003: Prevents race conditions between server-orchestrated tunnel commands
 this.tunnelOperationLocks = /* @__PURE__ */ new Map();
 // moduleUid -> operation promise
 // EP929: Health check polling interval (restored from EP843 removal)
@@ -6805,7 +6784,7 @@ var Daemon = class _Daemon {
 * EP843: Acquire a per-module lock for tunnel operations
 *
 * Prevents race conditions between:
-* -
+* - Server-orchestrated tunnel_start commands (EP1003)
 * - module_state_changed event handler
 * - Multiple rapid state transitions
 *
@@ -6898,6 +6877,32 @@ var Daemon = class _Daemon {
 } else {
 console.log(`[Daemon] Running git command in bare repo: ${bareRepoPath}`);
 }
+if (gitCmd.action === "worktree_setup") {
+const wtManager = new WorktreeManager(projectPath);
+await wtManager.initialize();
+const SETUP_TIMEOUT_MS = 15 * 60 * 1e3;
+const setupPromise = this.handleWorktreeSetup(gitCmd, projectPath, wtManager);
+const timeoutPromise = new Promise(
+(_, reject) => setTimeout(() => reject(new Error("Worktree setup timed out after 15 minutes")), SETUP_TIMEOUT_MS)
+);
+let setupResult;
+try {
+setupResult = await Promise.race([setupPromise, timeoutPromise]);
+} catch (timeoutError) {
+setupResult = {
+success: false,
+error: "COMMAND_TIMEOUT",
+output: timeoutError instanceof Error ? timeoutError.message : "Setup timed out"
+};
+}
+await client.send({
+type: "result",
+commandId: message.id,
+result: setupResult
+});
+console.log(`[Daemon] EP1002: Worktree setup completed for ${gitCmd.moduleUid}:`, setupResult.success ? "success" : "failed");
+return;
+}
 const result = await gitExecutor.execute(gitCmd, {
 cwd
 });
@@ -7321,9 +7326,6 @@ var Daemon = class _Daemon {
 this.syncMachineProjectPath(projectId, projectPath).catch((err) => {
 console.warn("[Daemon] EP995: Project path sync failed:", err.message);
 });
-this.autoStartTunnelsForProject(projectPath, projectId).catch((error) => {
-console.error(`[Daemon] EP819: Failed to auto-start tunnels:`, error);
-});
 cleanupStaleCommits(projectPath).then((cleanupResult) => {
 if (cleanupResult.deleted_count > 0) {
 console.log(`[Daemon] EP950: Cleaned up ${cleanupResult.deleted_count} stale commit(s) on connect`);
@@ -7331,8 +7333,8 @@ var Daemon = class _Daemon {
 }).catch((err) => {
 console.warn("[Daemon] EP950: Cleanup on connect failed:", err.message);
 });
-this.reconcileWorktrees(projectId, projectPath).catch((err) => {
-console.warn("[Daemon]
+this.reconcileWorktrees(projectId, projectPath, client).catch((err) => {
+console.warn("[Daemon] EP1003: Reconciliation report failed:", err.message);
 });
 });
 client.on("module_state_changed", async (message) => {
@@ -7340,174 +7342,18 @@ var Daemon = class _Daemon {
 const { moduleUid, state, previousState, branchName, devMode, checkoutMachineId } = message;
 console.log(`[Daemon] EP843: Module ${moduleUid} state changed: ${previousState} \u2192 ${state}`);
 if (devMode !== "local") {
-console.log(`[Daemon]
+console.log(`[Daemon] EP1003: State change for non-local module ${moduleUid} (mode: ${devMode || "unknown"})`);
 return;
 }
 if (checkoutMachineId && checkoutMachineId !== this.deviceId) {
-console.log(`[Daemon]
+console.log(`[Daemon] EP1003: State change for ${moduleUid} handled by different machine: ${checkoutMachineId}`);
 return;
 }
-
-
-
-
-
-const startingWork = previousState === "ready" && state === "doing";
-const tunnelNotRunning = !tunnelManager.hasTunnel(moduleUid);
-const needsCrashRecovery = isInActiveZone && tunnelNotRunning;
-if (startingWork || needsCrashRecovery) {
-if (tunnelManager.hasTunnel(moduleUid)) {
-console.log(`[Daemon] EP843: Tunnel already running for ${moduleUid}, skipping start`);
-return;
-}
-console.log(`[Daemon] EP956: Starting tunnel for ${moduleUid} (${previousState} \u2192 ${state})`);
-try {
-let worktree = await getWorktreeInfoForModule(moduleUid);
-if (!worktree) {
-console.error(`[Daemon] EP956: Cannot resolve worktree path for ${moduleUid} (missing config slugs)`);
-return;
-}
-if (!worktree.exists && startingWork) {
-console.log(`[Daemon] EP959: Creating worktree for ${moduleUid} at ${worktree.path}`);
-const projectRoot = await getProjectRootPath();
-if (!projectRoot) {
-console.error(`[Daemon] EP959: Cannot determine project root for worktree creation`);
-return;
-}
-const worktreeManager = new WorktreeManager(projectRoot);
-const initialized = await worktreeManager.initialize();
-if (!initialized) {
-console.error(`[Daemon] EP959: Failed to initialize WorktreeManager at ${projectRoot}`);
-return;
-}
-const moduleBranchName = branchName || moduleUid;
-const createResult = await worktreeManager.createWorktree(moduleUid, moduleBranchName, true);
-if (!createResult.success) {
-console.error(`[Daemon] EP959: Failed to create worktree for ${moduleUid}: ${createResult.error}`);
-return;
-}
-console.log(`[Daemon] EP959: Worktree created for ${moduleUid} at ${createResult.worktreePath}`);
-if (this.deviceId) {
-try {
-const ownershipConfig = await (0, import_core10.loadConfig)();
-const ownershipApiUrl = ownershipConfig?.api_url || "https://episoda.dev";
-const ownershipResponse = await fetchWithAuth(`${ownershipApiUrl}/api/modules/${moduleUid}`, {
-method: "PATCH",
-body: JSON.stringify({ checkout_machine_id: this.deviceId })
-});
-if (ownershipResponse.ok) {
-console.log(`[Daemon] EP990: Claimed ownership of ${moduleUid} for device ${this.deviceId}`);
-} else {
-console.warn(`[Daemon] EP990: Failed to claim ownership of ${moduleUid}: ${ownershipResponse.status}`);
-}
-} catch (ownershipError) {
-console.warn(`[Daemon] EP990: Error claiming ownership of ${moduleUid}:`, ownershipError);
-}
-}
-worktree = await getWorktreeInfoForModule(moduleUid);
-if (!worktree || !worktree.exists) {
-console.error(`[Daemon] EP959: Worktree still not found after creation for ${moduleUid}`);
-return;
-}
-const worktreeConfig = await (0, import_core10.loadConfig)();
-const setupConfig = worktreeConfig?.project_settings;
-const envVars = await fetchEnvVars2();
-const hasEnvVars = Object.keys(envVars).length > 0;
-const hasSetupConfig = setupConfig?.worktree_copy_files?.length || setupConfig?.worktree_setup_script || hasEnvVars;
-{
-console.log(`[Daemon] EP986: Starting async worktree setup for ${moduleUid}${hasSetupConfig ? " (with config)" : " (for dependency installation)"}`);
-await worktreeManager.updateWorktreeStatus(moduleUid, "pending");
-await this.updateModuleWorktreeStatus(moduleUid, "pending", worktree.path);
-this.runWorktreeSetupAsync(
-moduleUid,
-worktreeManager,
-setupConfig?.worktree_copy_files || [],
-setupConfig?.worktree_setup_script,
-worktree.path,
-envVars
-// EP973: Use server-fetched env vars
-).then(() => {
-console.log(`[Daemon] EP959: Setup complete for ${moduleUid}, starting tunnel`);
-this.startTunnelForModule(moduleUid, worktree.path);
-}).catch((err) => {
-console.error(`[Daemon] EP959: Setup failed for ${moduleUid}:`, err);
-});
-return;
-}
-}
-if (!worktree.exists) {
-console.log(`[Daemon] EP956: No worktree for ${moduleUid} at ${worktree.path}, skipping tunnel`);
-return;
-}
-await this.updateModuleWorktreeStatus(moduleUid, "ready", worktree.path);
-const port = allocatePort(moduleUid);
-console.log(`[Daemon] EP956: Using worktree ${worktree.path} on port ${port}`);
-const devConfig = await (0, import_core10.loadConfig)();
-const devServerScript = devConfig?.project_settings?.worktree_dev_server_script;
-const devServerResult = await ensureDevServer(worktree.path, port, moduleUid, devServerScript);
-if (!devServerResult.success) {
-console.error(`[Daemon] EP956: Dev server failed for ${moduleUid}: ${devServerResult.error}`);
-releasePort(moduleUid);
-return;
-}
-const config2 = devConfig;
-const apiUrl = config2?.api_url || "https://episoda.dev";
-const startResult = await tunnelManager.startTunnel({
-moduleUid,
-port,
-onUrl: async (url) => {
-console.log(`[Daemon] EP956: Tunnel URL for ${moduleUid}: ${url}`);
-try {
-await fetchWithAuth(`${apiUrl}/api/modules/${moduleUid}/tunnel`, {
-method: "POST",
-body: JSON.stringify({ tunnel_url: url })
-});
-} catch (err) {
-console.warn(`[Daemon] EP956: Failed to report tunnel URL:`, err instanceof Error ? err.message : err);
-}
-},
-onStatusChange: (status, error) => {
-if (status === "error") {
-console.error(`[Daemon] EP956: Tunnel error for ${moduleUid}: ${error}`);
-}
-}
-});
-if (startResult.success) {
-console.log(`[Daemon] EP956: Tunnel started for ${moduleUid}`);
-} else {
-console.error(`[Daemon] EP956: Tunnel failed for ${moduleUid}: ${startResult.error}`);
-releasePort(moduleUid);
-}
-} catch (error) {
-console.error(`[Daemon] EP956: Error starting tunnel for ${moduleUid}:`, error);
-releasePort(moduleUid);
-}
-}
-if (state === "done" && wasInActiveZone) {
-console.log(`[Daemon] EP956: Stopping tunnel for ${moduleUid} (${previousState} \u2192 done)`);
-try {
-await tunnelManager.stopTunnel(moduleUid);
-releasePort(moduleUid);
-console.log(`[Daemon] EP956: Tunnel stopped and port released for ${moduleUid}`);
-const config2 = await (0, import_core10.loadConfig)();
-const apiUrl = config2?.api_url || "https://episoda.dev";
-try {
-await fetchWithAuth(`${apiUrl}/api/modules/${moduleUid}/tunnel`, {
-method: "POST",
-body: JSON.stringify({ tunnel_url: null })
-});
-} catch (err) {
-console.warn(`[Daemon] EP956: Failed to clear tunnel URL:`, err instanceof Error ? err.message : err);
-}
-this.cleanupModuleWorktree(moduleUid).catch((err) => {
-console.warn(`[Daemon] EP956: Async cleanup failed for ${moduleUid}:`, err instanceof Error ? err.message : err);
-});
-} catch (error) {
-console.error(`[Daemon] EP956: Error stopping tunnel for ${moduleUid}:`, error);
-releasePort(moduleUid);
-}
-}
-});
+if (previousState === "ready" && state === "doing") {
+console.log(`[Daemon] EP1003: Module ${moduleUid} entering doing - server will send tunnel_start`);
+} else if (state === "done") {
+console.log(`[Daemon] EP1003: Module ${moduleUid} entering done - server will send tunnel_stop`);
+}
 }
 });
 client.on("error", (message) => {
@@ -7840,98 +7686,87 @@ var Daemon = class _Daemon {
 * This self-healing mechanism catches modules that transitioned
 * while the daemon was disconnected.
 */
-
-
+/**
+* EP1003: Report-only reconciliation
+* Daemon reports local state to server, server decides what commands to send.
+* This replaces autonomous worktree creation and tunnel starting.
+*/
+async reconcileWorktrees(projectId, projectPath, client) {
+console.log(`[Daemon] EP1003: Starting reconciliation report for project ${projectId}`);
 try {
 if (!this.deviceId) {
-console.log("[Daemon]
+console.log("[Daemon] EP1003: Cannot reconcile - deviceId not available yet");
 return;
 }
 const config = await (0, import_core10.loadConfig)();
 if (!config) return;
 const apiUrl = config.api_url || "https://episoda.dev";
-const
-
-
+const controller = new AbortController();
+const timeoutId = setTimeout(() => controller.abort(), 1e4);
+let modulesResponse;
+try {
+modulesResponse = await fetchWithAuth(
+`${apiUrl}/api/modules?state=doing,review&dev_mode=local&checkout_machine_id=${this.deviceId}&project_id=${projectId}`,
+{ signal: controller.signal }
+);
+} finally {
+clearTimeout(timeoutId);
+}
 if (!modulesResponse.ok) {
-console.warn(`[Daemon]
+console.warn(`[Daemon] EP1003: Failed to fetch modules for reconciliation: ${modulesResponse.status}`);
 return;
 }
 const modulesData = await modulesResponse.json();
 const modules = modulesData.modules || [];
-
-
-
-
-
-const worktreeManager = new WorktreeManager(projectPath);
-const initialized = await worktreeManager.initialize();
-if (!initialized) {
-console.error(`[Daemon] EP995: Failed to initialize WorktreeManager`);
-return;
-}
+console.log(`[Daemon] EP1003: Building reconciliation report for ${modules.length} module(s)`);
+const tunnelManager = getTunnelManager();
+await tunnelManager.initialize();
+const moduleStatuses = [];
+const expectedModuleUids = new Set(modules.map((m) => m.uid));
 for (const module2 of modules) {
 const moduleUid = module2.uid;
-const branchName = module2.branch_name;
 const worktree = await getWorktreeInfoForModule(moduleUid);
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-if (!newWorktree?.exists) {
-console.error(`[Daemon] EP995: Worktree still not found after creation`);
-continue;
-}
-const setupConfig = config.project_settings;
-const envVars = await fetchEnvVars2();
-console.log(`[Daemon] EP995: Starting setup for reconciled module ${moduleUid}`);
-await worktreeManager.updateWorktreeStatus(moduleUid, "pending");
-await this.updateModuleWorktreeStatus(moduleUid, "pending", newWorktree.path);
-this.runWorktreeSetupAsync(
-moduleUid,
-worktreeManager,
-setupConfig?.worktree_copy_files || [],
-setupConfig?.worktree_setup_script,
-newWorktree.path,
-envVars
-).then(() => {
-console.log(`[Daemon] EP995: Setup complete for reconciled ${moduleUid}`);
-this.startTunnelForModule(moduleUid, newWorktree.path);
-}).catch((err) => {
-console.error(`[Daemon] EP995: Setup failed for reconciled ${moduleUid}:`, err);
+const tunnelRunning = tunnelManager.hasTunnel(moduleUid);
+const tunnelInfo = tunnelManager.getTunnel(moduleUid);
+const status = {
+moduleUid,
+moduleState: module2.state,
+worktreeExists: worktree?.exists || false,
+worktreePath: worktree?.path,
+tunnelRunning,
+tunnelPort: tunnelInfo?.port
+};
+moduleStatuses.push(status);
+console.log(`[Daemon] EP1003: Module ${moduleUid}: worktree=${status.worktreeExists}, tunnel=${status.tunnelRunning}`);
+}
+const allTunnels = tunnelManager.getAllTunnels();
+const orphanTunnels = [];
+for (const tunnel of allTunnels) {
+if (!expectedModuleUids.has(tunnel.moduleUid)) {
+console.log(`[Daemon] EP1003: Detected orphan tunnel for ${tunnel.moduleUid} (port ${tunnel.port})`);
+orphanTunnels.push({
+moduleUid: tunnel.moduleUid,
+port: tunnel.port
 });
-} else {
-await this.updateModuleWorktreeStatus(moduleUid, "ready", worktree.path);
-const tunnelManager = getTunnelManager();
-await tunnelManager.initialize();
-if (!tunnelManager.hasTunnel(moduleUid)) {
-console.log(`[Daemon] EP995: Module ${moduleUid} has worktree but no tunnel - starting...`);
-await this.startTunnelForModule(moduleUid, worktree.path);
-} else {
-console.log(`[Daemon] EP995: Module ${moduleUid} OK - worktree and tunnel exist`);
-}
 }
 }
-
+if (orphanTunnels.length > 0) {
+console.log(`[Daemon] EP1003: Reporting ${orphanTunnels.length} orphan tunnel(s) for server cleanup`);
+}
+const report = {
+projectId,
+machineId: this.deviceId,
+modules: moduleStatuses,
+orphanTunnels: orphanTunnels.length > 0 ? orphanTunnels : void 0
+};
+console.log(`[Daemon] EP1003: Sending reconciliation report with ${moduleStatuses.length} module(s)`);
+await client.send({
+type: "reconciliation_report",
+report
+});
+console.log("[Daemon] EP1003: Reconciliation report sent - awaiting server commands");
 } catch (error) {
-console.error("[Daemon]
+console.error("[Daemon] EP1003: Reconciliation error:", error instanceof Error ? error.message : error);
 throw error;
 }
 }
@@ -7996,6 +7831,90 @@ var Daemon = class _Daemon {
 throw error;
 }
 }
+/**
+* EP1002: Handle worktree_setup command from server
+* This provides a unified setup flow for both local and cloud environments.
+* Server orchestrates, daemon executes.
+*/
+async handleWorktreeSetup(command, projectPath, worktreeManager) {
+const { path: worktreePath, moduleUid } = command;
+console.log(`[Daemon] EP1002: Handling worktree_setup for ${moduleUid} at ${worktreePath}`);
+try {
+const envVars = await fetchEnvVars2();
+console.log(`[Daemon] EP1002: Fetched ${Object.keys(envVars).length} env vars for ${moduleUid}`);
+const config = await (0, import_core10.loadConfig)();
+const setupConfig = config?.project_settings;
+await this.runWorktreeSetupSync(
+moduleUid,
+worktreeManager,
+setupConfig?.worktree_copy_files || [],
+setupConfig?.worktree_setup_script,
+worktreePath,
+envVars
+);
+return {
+success: true,
+output: `Worktree setup completed for ${moduleUid}`,
+details: {
+moduleUid,
+worktreePath,
+envVarsCount: Object.keys(envVars).length
+}
+};
+} catch (error) {
+const errorMessage = error instanceof Error ? error.message : String(error);
+console.error(`[Daemon] EP1002: Worktree setup failed for ${moduleUid}:`, errorMessage);
+return {
+success: false,
+error: "SETUP_FAILED",
+output: errorMessage
+};
+}
+}
+/**
+* EP1002: Synchronous worktree setup for command-driven flow
+* Similar to runWorktreeSetupAsync but runs synchronously for server orchestration
+*/
+async runWorktreeSetupSync(moduleUid, worktreeManager, copyFiles, setupScript, worktreePath, envVars = {}) {
+console.log(`[Daemon] EP1002: Running worktree setup for ${moduleUid}`);
+await worktreeManager.updateWorktreeStatus(moduleUid, "running");
+await this.updateModuleWorktreeStatus(moduleUid, "setup", worktreePath);
+if (Object.keys(envVars).length > 0) {
+console.log(`[Daemon] EP1002: Writing .env with ${Object.keys(envVars).length} variables`);
+writeEnvFile(worktreePath, envVars);
+}
+const installCmd = getInstallCommand(worktreePath);
+if (installCmd) {
+console.log(`[Daemon] EP1002: ${installCmd.description} (detected from ${installCmd.detectedFrom})`);
+console.log(`[Daemon] EP1002: Running: ${installCmd.command.join(" ")}`);
+try {
+const { execSync: execSync7 } = await import("child_process");
+execSync7(installCmd.command.join(" "), {
+cwd: worktreePath,
+stdio: "inherit",
+timeout: 10 * 60 * 1e3,
+// 10 minute timeout
+env: { ...process.env, CI: "true" }
+});
+console.log(`[Daemon] EP1002: Dependencies installed successfully`);
+} catch (installError) {
+const errorMsg = installError instanceof Error ? installError.message : String(installError);
+console.warn(`[Daemon] EP1002: Dependency installation failed (non-fatal): ${errorMsg}`);
+}
+} else {
+console.log(`[Daemon] EP1002: No package manager detected, skipping dependency installation`);
+}
+if (setupScript) {
+console.log(`[Daemon] EP1002: Running setup script`);
+const scriptResult = await worktreeManager.runSetupScript(moduleUid, setupScript);
+if (!scriptResult.success) {
+throw new Error(`Setup script failed: ${scriptResult.error}`);
+}
+}
+await worktreeManager.updateWorktreeStatus(moduleUid, "ready");
+await this.updateModuleWorktreeStatus(moduleUid, "ready", worktreePath);
+console.log(`[Daemon] EP1002: Worktree setup complete for ${moduleUid}`);
+}
 /**
 * EP959-11: Run worktree setup asynchronously
 * EP964: Added envVars parameter to inject .env file
@@ -8059,222 +7978,10 @@ var Daemon = class _Daemon {
 throw error;
 }
 }
-
-
-
-
-const tunnelManager = getTunnelManager();
-await tunnelManager.initialize();
-if (tunnelManager.hasTunnel(moduleUid)) {
-console.log(`[Daemon] EP959: Tunnel already running for ${moduleUid}`);
-return;
-}
-try {
-const config = await (0, import_core10.loadConfig)();
-const apiUrl = config?.api_url || "https://episoda.dev";
-const devServerScript = config?.project_settings?.worktree_dev_server_script;
-const port = allocatePort(moduleUid);
-console.log(`[Daemon] EP959: Post-setup tunnel start for ${moduleUid} on port ${port}`);
-const devServerResult = await ensureDevServer(worktreePath, port, moduleUid, devServerScript);
-if (!devServerResult.success) {
-console.error(`[Daemon] EP959: Dev server failed for ${moduleUid}: ${devServerResult.error}`);
-releasePort(moduleUid);
-return;
-}
-const startResult = await tunnelManager.startTunnel({
-moduleUid,
-port,
-onUrl: async (url) => {
-console.log(`[Daemon] EP959: Tunnel URL for ${moduleUid}: ${url}`);
-try {
-await fetchWithAuth(`${apiUrl}/api/modules/${moduleUid}/tunnel`, {
-method: "POST",
-body: JSON.stringify({ tunnel_url: url })
-});
-} catch (err) {
-console.warn(`[Daemon] EP959: Failed to report tunnel URL:`, err instanceof Error ? err.message : err);
-}
-},
-onStatusChange: (status, error) => {
-if (status === "error") {
-console.error(`[Daemon] EP959: Tunnel error for ${moduleUid}: ${error}`);
-}
-}
-});
-if (startResult.success) {
-console.log(`[Daemon] EP959: Tunnel started for ${moduleUid}`);
-} else {
-console.error(`[Daemon] EP959: Tunnel failed for ${moduleUid}: ${startResult.error}`);
-releasePort(moduleUid);
-}
-} catch (error) {
-console.error(`[Daemon] EP959: Error starting tunnel for ${moduleUid}:`, error);
-releasePort(moduleUid);
-}
-}
-/**
-* EP819: Auto-start tunnels for active local modules on daemon connect/reconnect
-*
-* Queries for modules in doing/review state with dev_mode=local that don't have
-* an active tunnel_url, and starts tunnels for each.
-*/
-async autoStartTunnelsForProject(projectPath, projectUid) {
-console.log(`[Daemon] EP819: Checking for active local modules to auto-start tunnels...`);
-try {
-const config = await (0, import_core10.loadConfig)();
-if (!config?.access_token) {
-console.warn(`[Daemon] EP819: No access token, skipping tunnel auto-start`);
-return;
-}
-const apiUrl = config.api_url || "https://episoda.dev";
-const response = await fetchWithAuth(
-`${apiUrl}/api/modules?state=doing,review&fields=id,uid,dev_mode,tunnel_url,checkout_machine_id`
-);
-if (!response.ok) {
-console.warn(`[Daemon] EP819: Failed to fetch modules: ${response.status}`);
-return;
-}
-const data = await response.json();
-const modules = data.modules || [];
-const tunnelManager = getTunnelManager();
-await tunnelManager.initialize();
-const activeTunnelUids = tunnelManager.getActiveModuleUids();
-const validModuleUids = new Set(
-modules.filter(
-(m) => m.dev_mode === "local" && (!m.checkout_machine_id || m.checkout_machine_id === this.deviceId)
-).map((m) => m.uid)
-);
-const orphanedTunnels = activeTunnelUids.filter((uid) => !validModuleUids.has(uid));
-if (orphanedTunnels.length > 0) {
-console.log(`[Daemon] EP956: Found ${orphanedTunnels.length} orphaned tunnels to stop: ${orphanedTunnels.join(", ")}`);
-for (const orphanUid of orphanedTunnels) {
-try {
-await tunnelManager.stopTunnel(orphanUid);
-releasePort(orphanUid);
-console.log(`[Daemon] EP956: Stopped orphaned tunnel and released port for ${orphanUid}`);
-try {
-await fetchWithAuth(`${apiUrl}/api/modules/${orphanUid}/tunnel`, {
-method: "POST",
-body: JSON.stringify({ tunnel_url: null })
-});
-} catch (err) {
-console.warn(`[Daemon] EP956: Failed to clear tunnel URL for ${orphanUid}:`, err instanceof Error ? err.message : err);
-}
-} catch (err) {
-console.error(`[Daemon] EP956: Failed to stop orphaned tunnel ${orphanUid}:`, err instanceof Error ? err.message : err);
-}
-}
-}
-const localModulesNeedingTunnel = modules.filter(
-(m) => m.dev_mode === "local" && (!m.checkout_machine_id || m.checkout_machine_id === this.deviceId) && !tunnelManager.hasTunnel(m.uid)
-);
-if (localModulesNeedingTunnel.length === 0) {
-console.log(`[Daemon] EP819: No local modules need tunnel auto-start`);
-return;
-}
-console.log(`[Daemon] EP956: Found ${localModulesNeedingTunnel.length} local modules needing tunnels`);
-for (const module2 of localModulesNeedingTunnel) {
-const moduleUid = module2.uid;
-const worktree = await getWorktreeInfoForModule(moduleUid);
-if (!worktree) {
-console.warn(`[Daemon] EP956: Cannot resolve worktree for ${moduleUid} (missing config slugs)`);
-continue;
-}
-if (!worktree.exists) {
-console.log(`[Daemon] EP956: No worktree for ${moduleUid} at ${worktree.path}, skipping`);
-continue;
-}
-const port = allocatePort(moduleUid);
-console.log(`[Daemon] EP956: Auto-starting tunnel for ${moduleUid} at ${worktree.path} on port ${port}`);
-const reportTunnelStatus = async (statusData) => {
-try {
-const statusResponse = await fetchWithAuth(`${apiUrl}/api/modules/${moduleUid}/tunnel`, {
-method: "POST",
-body: JSON.stringify(statusData)
-});
-if (statusResponse.ok) {
-console.log(`[Daemon] EP819: Tunnel status reported for ${moduleUid}`);
-} else {
-console.warn(`[Daemon] EP819: Failed to report tunnel status: ${statusResponse.statusText}`);
-}
-} catch (reportError) {
-console.warn(`[Daemon] EP819: Error reporting tunnel status:`, reportError);
-}
-};
-(async () => {
-await this.withTunnelLock(moduleUid, async () => {
-if (tunnelManager.hasTunnel(moduleUid)) {
-console.log(`[Daemon] EP956: Tunnel already running for ${moduleUid}, skipping auto-start`);
-return;
-}
-const MAX_RETRIES = 3;
-const RETRY_DELAY_MS = 3e3;
-await reportTunnelStatus({
-tunnel_started_at: (/* @__PURE__ */ new Date()).toISOString(),
-tunnel_error: null
-});
-try {
-const devServerScript = config.project_settings?.worktree_dev_server_script;
-console.log(`[Daemon] EP956: Ensuring dev server is running for ${moduleUid} at ${worktree.path}...`);
-const devServerResult = await ensureDevServer(worktree.path, port, moduleUid, devServerScript);
-if (!devServerResult.success) {
-const errorMsg2 = `Dev server failed to start: ${devServerResult.error}`;
-console.error(`[Daemon] EP956: ${errorMsg2}`);
-await reportTunnelStatus({ tunnel_error: errorMsg2 });
-releasePort(moduleUid);
-return;
-}
-console.log(`[Daemon] EP956: Dev server ready on port ${port}`);
-let lastError;
-for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) {
-console.log(`[Daemon] EP819: Starting tunnel for ${moduleUid} (attempt ${attempt}/${MAX_RETRIES})...`);
-const startResult = await tunnelManager.startTunnel({
-moduleUid,
-port,
-onUrl: async (url) => {
-console.log(`[Daemon] EP819: Tunnel URL for ${moduleUid}: ${url}`);
-await reportTunnelStatus({
-tunnel_url: url,
-tunnel_error: null
-});
-},
-onStatusChange: (status, error) => {
-if (status === "error") {
-console.error(`[Daemon] EP819: Tunnel error for ${moduleUid}: ${error}`);
-reportTunnelStatus({ tunnel_error: error || "Tunnel connection error" });
-} else if (status === "reconnecting") {
-console.log(`[Daemon] EP819: Tunnel reconnecting for ${moduleUid}...`);
-}
-}
-});
-if (startResult.success) {
-console.log(`[Daemon] EP819: Tunnel started successfully for ${moduleUid}`);
-return;
-}
-lastError = startResult.error;
-console.warn(`[Daemon] EP819: Tunnel start attempt ${attempt} failed: ${lastError}`);
-if (attempt < MAX_RETRIES) {
-console.log(`[Daemon] EP819: Retrying in ${RETRY_DELAY_MS}ms...`);
-await new Promise((resolve3) => setTimeout(resolve3, RETRY_DELAY_MS));
-}
-}
-const errorMsg = `Tunnel failed after ${MAX_RETRIES} attempts: ${lastError}`;
-console.error(`[Daemon] EP956: ${errorMsg}`);
-await reportTunnelStatus({ tunnel_error: errorMsg });
-releasePort(moduleUid);
-} catch (error) {
-const errorMsg = error instanceof Error ? error.message : String(error);
-console.error(`[Daemon] EP956: Async tunnel startup error:`, error);
-await reportTunnelStatus({ tunnel_error: errorMsg });
-releasePort(moduleUid);
-}
-});
-})();
-}
-} catch (error) {
-console.error(`[Daemon] EP819: Error auto-starting tunnels:`, error);
-}
-}
+// EP1003: startTunnelForModule removed - server now orchestrates via tunnel_start commands
+// EP1003: autoStartTunnelsForProject removed - server now orchestrates via reconciliation
+// Recovery flow: daemon sends reconciliation_report → server processes and sends commands
+// Orphan tunnel cleanup is now also handled server-side via reconciliation report
 // EP843: startTunnelPolling() removed - replaced by push-based state sync
 // See module_state_changed handler for the new implementation
 /**
@@ -8593,7 +8300,9 @@ var Daemon = class _Daemon {
 method: "POST",
 body: JSON.stringify({
 tunnel_url: url,
-tunnel_error: null
+tunnel_error: null,
+restart_reason: "health_check_failure"
+// EP1003: Server can track restart causes
 })
 });
 } catch (e) {