episoda 0.2.41 → 0.2.42
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/daemon/daemon-process.js +1126 -450
- package/dist/daemon/daemon-process.js.map +1 -1
- package/dist/index.js +232 -539
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
@@ -6,16 +6,9 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
 var __getProtoOf = Object.getPrototypeOf;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
-var __esm = (fn, res) => function __init() {
-return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
-};
 var __commonJS = (cb, mod) => function __require() {
 return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
 };
-var __export = (target, all) => {
-for (var name in all)
-__defProp(target, name, { get: all[name], enumerable: true });
-};
 var __copyProps = (to, from, except, desc) => {
 if (from && typeof from === "object" || typeof from === "function") {
 for (let key of __getOwnPropNames(from))
@@ -1559,15 +1552,15 @@ var require_git_executor = __commonJS({
 try {
 const { stdout: gitDir } = await execAsync2("git rev-parse --git-dir", { cwd, timeout: 5e3 });
 const gitDirPath = gitDir.trim();
-const
+const fs18 = await Promise.resolve().then(() => __importStar(require("fs"))).then((m) => m.promises);
 const rebaseMergePath = `${gitDirPath}/rebase-merge`;
 const rebaseApplyPath = `${gitDirPath}/rebase-apply`;
 try {
-await
+await fs18.access(rebaseMergePath);
 inRebase = true;
 } catch {
 try {
-await
+await fs18.access(rebaseApplyPath);
 inRebase = true;
 } catch {
 inRebase = false;
@@ -1621,9 +1614,9 @@ var require_git_executor = __commonJS({
 error: validation.error || "UNKNOWN_ERROR"
 };
 }
-const
+const fs18 = await Promise.resolve().then(() => __importStar(require("fs"))).then((m) => m.promises);
 try {
-await
+await fs18.access(command.path);
 return {
 success: false,
 error: "WORKTREE_EXISTS",
@@ -1677,9 +1670,9 @@ var require_git_executor = __commonJS({
 */
 async executeWorktreeRemove(command, cwd, options) {
 try {
-const
+const fs18 = await Promise.resolve().then(() => __importStar(require("fs"))).then((m) => m.promises);
 try {
-await
+await fs18.access(command.path);
 } catch {
 return {
 success: false,
@@ -1714,7 +1707,7 @@ var require_git_executor = __commonJS({
 const result = await this.runGitCommand(args, cwd, options);
 if (result.success) {
 try {
-await
+await fs18.rm(command.path, { recursive: true, force: true });
 } catch {
 }
 return {
@@ -1848,10 +1841,10 @@ var require_git_executor = __commonJS({
 */
 async executeCloneBare(command, options) {
 try {
-const
-const
+const fs18 = await Promise.resolve().then(() => __importStar(require("fs"))).then((m) => m.promises);
+const path19 = await Promise.resolve().then(() => __importStar(require("path")));
 try {
-await
+await fs18.access(command.path);
 return {
 success: false,
 error: "BRANCH_ALREADY_EXISTS",
@@ -1860,9 +1853,9 @@ var require_git_executor = __commonJS({
 };
 } catch {
 }
-const parentDir =
+const parentDir = path19.dirname(command.path);
 try {
-await
+await fs18.mkdir(parentDir, { recursive: true });
 } catch {
 }
 const { stdout, stderr } = await execAsync2(
@@ -1910,22 +1903,22 @@ var require_git_executor = __commonJS({
 */
 async executeProjectInfo(cwd, options) {
 try {
-const
-const
+const fs18 = await Promise.resolve().then(() => __importStar(require("fs"))).then((m) => m.promises);
+const path19 = await Promise.resolve().then(() => __importStar(require("path")));
 let currentPath = cwd;
 let projectPath = cwd;
 let bareRepoPath;
 for (let i = 0; i < 10; i++) {
-const bareDir =
-const episodaDir =
+const bareDir = path19.join(currentPath, ".bare");
+const episodaDir = path19.join(currentPath, ".episoda");
 try {
-await
-await
+await fs18.access(bareDir);
+await fs18.access(episodaDir);
 projectPath = currentPath;
 bareRepoPath = bareDir;
 break;
 } catch {
-const parentPath =
+const parentPath = path19.dirname(currentPath);
 if (parentPath === currentPath) {
 break;
 }
@@ -2514,36 +2507,36 @@ var require_auth = __commonJS({
 };
 })();
 Object.defineProperty(exports2, "__esModule", { value: true });
-exports2.getConfigDir =
+exports2.getConfigDir = getConfigDir7;
 exports2.getConfigPath = getConfigPath;
-exports2.loadConfig =
+exports2.loadConfig = loadConfig7;
 exports2.saveConfig = saveConfig2;
 exports2.validateToken = validateToken;
-var
-var
+var fs18 = __importStar(require("fs"));
+var path19 = __importStar(require("path"));
 var os7 = __importStar(require("os"));
 var child_process_1 = require("child_process");
 var DEFAULT_CONFIG_FILE = "config.json";
-function
-return process.env.EPISODA_CONFIG_DIR ||
+function getConfigDir7() {
+return process.env.EPISODA_CONFIG_DIR || path19.join(os7.homedir(), ".episoda");
 }
 function getConfigPath(configPath) {
 if (configPath) {
 return configPath;
 }
-return
+return path19.join(getConfigDir7(), DEFAULT_CONFIG_FILE);
 }
 function ensureConfigDir(configPath) {
-const dir =
-const isNew = !
+const dir = path19.dirname(configPath);
+const isNew = !fs18.existsSync(dir);
 if (isNew) {
-
+fs18.mkdirSync(dir, { recursive: true, mode: 448 });
 }
 if (process.platform === "darwin") {
-const nosyncPath =
-if (isNew || !
+const nosyncPath = path19.join(dir, ".nosync");
+if (isNew || !fs18.existsSync(nosyncPath)) {
 try {
-
+fs18.writeFileSync(nosyncPath, "", { mode: 384 });
 (0, child_process_1.execSync)(`xattr -w com.apple.fileprovider.ignore 1 "${dir}"`, {
 stdio: "ignore",
 timeout: 5e3
@@ -2553,13 +2546,13 @@ var require_auth = __commonJS({
 }
 }
 }
-async function
+async function loadConfig7(configPath) {
 const fullPath = getConfigPath(configPath);
-if (!
+if (!fs18.existsSync(fullPath)) {
 return null;
 }
 try {
-const content =
+const content = fs18.readFileSync(fullPath, "utf8");
 const config = JSON.parse(content);
 return config;
 } catch (error) {
@@ -2572,7 +2565,7 @@ var require_auth = __commonJS({
 ensureConfigDir(fullPath);
 try {
 const content = JSON.stringify(config, null, 2);
-
+fs18.writeFileSync(fullPath, content, { mode: 384 });
 } catch (error) {
 throw new Error(`Failed to save config: ${error instanceof Error ? error.message : String(error)}`);
 }
@@ -2684,49 +2677,12 @@ var require_dist = __commonJS({
 }
 });

-// src/utils/port-check.ts
-var port_check_exports = {};
-__export(port_check_exports, {
-getServerPort: () => getServerPort,
-isPortInUse: () => isPortInUse
-});
-async function isPortInUse(port) {
-return new Promise((resolve3) => {
-const server = net2.createServer();
-server.once("error", (err) => {
-if (err.code === "EADDRINUSE") {
-resolve3(true);
-} else {
-resolve3(false);
-}
-});
-server.once("listening", () => {
-server.close();
-resolve3(false);
-});
-server.listen(port);
-});
-}
-function getServerPort() {
-if (process.env.PORT) {
-return parseInt(process.env.PORT, 10);
-}
-return 3e3;
-}
-var net2;
-var init_port_check = __esm({
-"src/utils/port-check.ts"() {
-"use strict";
-net2 = __toESM(require("net"));
-}
-});
-
 // package.json
 var require_package = __commonJS({
 "package.json"(exports2, module2) {
 module2.exports = {
 name: "episoda",
-version: "0.2.
+version: "0.2.42",
 description: "CLI tool for Episoda local development workflow orchestration",
 main: "dist/index.js",
 types: "dist/index.d.ts",
@@ -3056,8 +3012,8 @@ var IPCServer = class {
 const message = buffer.slice(0, newlineIndex);
 buffer = buffer.slice(newlineIndex + 1);
 try {
-const
-const response = await this.handleRequest(
+const request2 = JSON.parse(message);
+const response = await this.handleRequest(request2);
 socket.write(JSON.stringify(response) + "\n");
 } catch (error) {
 const errorResponse = {
@@ -3075,25 +3031,25 @@ var IPCServer = class {
 /**
 * Handle IPC request
 */
-async handleRequest(
-const handler = this.handlers.get(
+async handleRequest(request2) {
+const handler = this.handlers.get(request2.command);
 if (!handler) {
 return {
-id:
+id: request2.id,
 success: false,
-error: `Unknown command: ${
+error: `Unknown command: ${request2.command}`
 };
 }
 try {
-const data = await handler(
+const data = await handler(request2.params);
 return {
-id:
+id: request2.id,
 success: true,
 data
 };
 } catch (error) {
 return {
-id:
+id: request2.id,
 success: false,
 error: error instanceof Error ? error.message : "Unknown error"
 };
@@ -3102,7 +3058,7 @@ var IPCServer = class {
 };

 // src/daemon/daemon-process.ts
-var
+var import_core11 = __toESM(require_dist());

 // src/utils/update-checker.ts
 var import_child_process2 = require("child_process");
@@ -4045,8 +4001,8 @@ async function provisionNamedTunnel(moduleId) {
 }
 return {
 success: true,
-tunnel: data.
-message: data.
+tunnel: data.tunnel,
+message: data.message
 };
 } catch (error) {
 return {
@@ -4074,11 +4030,11 @@ async function provisionNamedTunnelByUid(moduleUid) {
 };
 }
 const moduleData = await moduleResponse.json();
-const moduleId = moduleData.
+const moduleId = moduleData.moduleRecord?.id;
 if (!moduleId) {
 return {
 success: false,
-error: `Module ${moduleUid} has no ID`
+error: `Module ${moduleUid} has no ID (response keys: ${JSON.stringify(Object.keys(moduleData))})`
 };
 }
 return provisionNamedTunnel(moduleId);
@@ -5375,11 +5331,57 @@ var AgentManager = class {
 }
 };

-// src/
+// src/preview/types.ts
+var DEV_SERVER_CONSTANTS = {
+/** Maximum restart attempts before giving up */
+MAX_RESTART_ATTEMPTS: 5,
+/** Initial delay before first restart (ms) */
+INITIAL_RESTART_DELAY_MS: 2e3,
+/** Maximum delay between restarts (ms) */
+MAX_RESTART_DELAY_MS: 3e4,
+/** Maximum log file size before rotation (bytes) */
+MAX_LOG_SIZE_BYTES: 5 * 1024 * 1024,
+// 5MB
+/** Node.js memory limit (MB) */
+NODE_MEMORY_LIMIT_MB: 2048,
+/** Timeout waiting for server to start (ms) */
+STARTUP_TIMEOUT_MS: 6e4,
+/** Timeout for health check requests (ms) */
+HEALTH_CHECK_TIMEOUT_MS: 5e3
+};
+
+// src/preview/preview-manager.ts
+var import_events3 = require("events");
+var import_fs = require("fs");
+
+// src/preview/dev-server-runner.ts
 var import_child_process9 = require("child_process");
-
+var http = __toESM(require("http"));
+var fs11 = __toESM(require("fs"));
+var path12 = __toESM(require("path"));
+var import_events2 = require("events");
 var import_core7 = __toESM(require_dist());

+// src/utils/port-check.ts
+var net2 = __toESM(require("net"));
+async function isPortInUse(port) {
+return new Promise((resolve3) => {
+const server = net2.createServer();
+server.once("error", (err) => {
+if (err.code === "EADDRINUSE") {
+resolve3(true);
+} else {
+resolve3(false);
+}
+});
+server.once("listening", () => {
+server.close();
+resolve3(false);
+});
+server.listen(port);
+});
+}
+
 // src/utils/env-cache.ts
 var fs10 = __toESM(require("fs"));
 var path11 = __toESM(require("path"));
@@ -5531,10 +5533,809 @@ No cached values available as fallback.`
 }
 }

+// src/preview/dev-server-runner.ts
+var DevServerRunner = class extends import_events2.EventEmitter {
+constructor() {
+super();
+this.servers = /* @__PURE__ */ new Map();
+}
+/**
+* Start a dev server for a module
+*/
+async start(config) {
+const {
+projectPath,
+port,
+moduleUid,
+customCommand,
+autoRestart = true
+} = config;
+if (await isPortInUse(port)) {
+console.log(`[DevServerRunner] Server already running on port ${port}`);
+return { success: true, alreadyRunning: true };
+}
+const existing = this.servers.get(moduleUid);
+if (existing && !existing.process.killed) {
+console.log(`[DevServerRunner] Process already exists for ${moduleUid}`);
+return { success: true, alreadyRunning: true };
+}
+console.log(`[DevServerRunner] Starting dev server for ${moduleUid} on port ${port}...`);
+const injectedEnvVars = await this.fetchEnvVars(projectPath);
+try {
+const logPath = this.getLogFilePath(moduleUid);
+const process2 = this.spawnProcess(projectPath, port, moduleUid, logPath, customCommand, injectedEnvVars);
+const state = {
+process: process2,
+moduleUid,
+projectPath,
+port,
+startedAt: /* @__PURE__ */ new Date(),
+restartCount: 0,
+lastRestartAt: null,
+autoRestartEnabled: autoRestart,
+logFile: logPath,
+customCommand,
+injectedEnvVars
+};
+this.servers.set(moduleUid, state);
+this.writeToLog(logPath, `Starting dev server on port ${port}`);
+this.setupProcessHandlers(moduleUid, process2, logPath);
+console.log(`[DevServerRunner] Waiting for server on port ${port}...`);
+const ready = await this.waitForPort(port, DEV_SERVER_CONSTANTS.STARTUP_TIMEOUT_MS);
+if (!ready) {
+process2.kill();
+this.servers.delete(moduleUid);
+this.writeToLog(logPath, "Failed to start within timeout", true);
+return { success: false, error: "Dev server failed to start within timeout" };
+}
+console.log(`[DevServerRunner] Server started successfully on port ${port}`);
+this.writeToLog(logPath, "Server started successfully");
+this.emit("started", moduleUid, port);
+return { success: true };
+} catch (error) {
+const errorMsg = error instanceof Error ? error.message : String(error);
+console.error(`[DevServerRunner] Failed to start:`, error);
+return { success: false, error: errorMsg };
+}
+}
+/**
+* Stop a dev server
+*/
+async stop(moduleUid) {
+const state = this.servers.get(moduleUid);
+if (!state) {
+return;
+}
+state.autoRestartEnabled = false;
+if (!state.process.killed) {
+console.log(`[DevServerRunner] Stopping server for ${moduleUid}`);
+this.writeToLog(state.logFile, "Stopping server (manual stop)");
+state.process.kill("SIGTERM");
+await this.wait(2e3);
+if (!state.process.killed) {
+state.process.kill("SIGKILL");
+}
+}
+this.servers.delete(moduleUid);
+this.emit("stopped", moduleUid);
+}
+/**
+* Restart a dev server
+*/
+async restart(moduleUid) {
+const state = this.servers.get(moduleUid);
+if (!state) {
+return { success: false, error: `No dev server found for ${moduleUid}` };
+}
+const { projectPath, port, autoRestartEnabled, customCommand, logFile } = state;
+console.log(`[DevServerRunner] Restarting server for ${moduleUid}...`);
+this.writeToLog(logFile, "Manual restart requested");
+await this.stop(moduleUid);
+await this.wait(1e3);
+if (await isPortInUse(port)) {
+await this.killProcessOnPort(port);
+}
+return this.start({
+projectPath,
+port,
+moduleUid,
+customCommand,
+autoRestart: autoRestartEnabled
+});
+}
+/**
+* Check if a dev server is healthy (responding to HTTP requests)
+*/
+async isHealthy(moduleUid) {
+const state = this.servers.get(moduleUid);
+if (!state) {
+return false;
+}
+return this.checkHealth(state.port);
+}
+/**
+* Check if a dev server is running
+*/
+isRunning(moduleUid) {
+const state = this.servers.get(moduleUid);
+return !!state && !state.process.killed;
+}
+/**
+* Get status of a specific dev server
+*/
+getStatus(moduleUid) {
+const state = this.servers.get(moduleUid);
+if (!state) {
+return void 0;
+}
+return this.stateToStatus(state);
+}
+/**
+* Get status of all dev servers
+*/
+getAllStatus() {
+return Array.from(this.servers.values()).map((s) => this.stateToStatus(s));
+}
+/**
+* Stop all dev servers
+*/
+async stopAll() {
+const uids = Array.from(this.servers.keys());
+await Promise.all(uids.map((uid) => this.stop(uid)));
+}
+/**
+* Ensure a dev server is running, starting if needed
+*
+* Note: start() already handles the case where the port is in use,
+* returning { success: true, alreadyRunning: true }.
+*/
+async ensure(config) {
+return this.start({ ...config, autoRestart: true });
+}
+/**
+* Kill any process on a specific port
+*/
+async killProcessOnPort(port) {
+try {
+const result = (0, import_child_process9.execSync)(`lsof -ti:${port} 2>/dev/null || true`, { encoding: "utf8" }).trim();
+if (!result) {
+return true;
+}
+const pids = result.split("\n").filter(Boolean);
+console.log(`[DevServerRunner] Found ${pids.length} process(es) on port ${port}`);
+for (const pid of pids) {
+try {
+(0, import_child_process9.execSync)(`kill -15 ${pid} 2>/dev/null || true`);
+} catch {
+}
+}
+await this.wait(1e3);
+for (const pid of pids) {
+try {
+(0, import_child_process9.execSync)(`kill -0 ${pid} 2>/dev/null`);
+(0, import_child_process9.execSync)(`kill -9 ${pid} 2>/dev/null || true`);
+} catch {
+}
+}
+await this.wait(500);
+return !await isPortInUse(port);
+} catch (error) {
+console.error(`[DevServerRunner] Error killing process on port ${port}:`, error);
+return false;
+}
+}
+// ============ Private Methods ============
+async fetchEnvVars(projectPath) {
+try {
+const config = await (0, import_core7.loadConfig)();
+if (!config?.access_token || !config?.project_id) {
+return {};
+}
+const apiUrl = config.api_url || "https://episoda.dev";
+const result = await fetchEnvVarsWithCache(apiUrl, config.access_token, {
+projectId: config.project_id,
+cacheTtl: 300
+});
+console.log(`[DevServerRunner] Loaded ${Object.keys(result.envVars).length} env vars`);
+const envFilePath = path12.join(projectPath, ".env");
+if (!fs11.existsSync(envFilePath) && Object.keys(result.envVars).length > 0) {
+console.log(`[DevServerRunner] Writing .env file`);
+writeEnvFile(projectPath, result.envVars);
+}
+return result.envVars;
+} catch (error) {
+console.warn(`[DevServerRunner] Failed to fetch env vars:`, error);
+return {};
+}
+}
+spawnProcess(projectPath, port, moduleUid, logPath, customCommand, envVars) {
+this.rotateLogIfNeeded(logPath);
+const nodeOptions = process.env.NODE_OPTIONS || "";
+const memoryFlag = `--max-old-space-size=${DEV_SERVER_CONSTANTS.NODE_MEMORY_LIMIT_MB}`;
+const enhancedNodeOptions = nodeOptions.includes("max-old-space-size") ? nodeOptions : `${nodeOptions} ${memoryFlag}`.trim();
+const command = customCommand || "npm run dev";
+const [cmd, ...args] = command.split(" ");
+console.log(`[DevServerRunner] Running: ${command}`);
+const mergedEnv = {
+...process.env,
+...envVars,
+PORT: String(port),
+NODE_OPTIONS: enhancedNodeOptions
+};
+const proc = (0, import_child_process9.spawn)(cmd, args, {
+cwd: projectPath,
+env: mergedEnv,
+stdio: ["ignore", "pipe", "pipe"],
+detached: false,
+shell: true
+});
+proc.stdout?.on("data", (data) => {
+const line = data.toString().trim();
+if (line) {
+console.log(`[DevServer:${moduleUid}] ${line}`);
+this.writeToLog(logPath, line);
+}
+});
+proc.stderr?.on("data", (data) => {
+const line = data.toString().trim();
+if (line) {
+console.error(`[DevServer:${moduleUid}] ${line}`);
+this.writeToLog(logPath, line, true);
+}
+});
+return proc;
+}
+setupProcessHandlers(moduleUid, proc, logPath) {
+proc.on("exit", (code, signal) => {
+this.handleProcessExit(moduleUid, code, signal);
+});
+proc.on("error", (error) => {
+console.error(`[DevServerRunner] Process error for ${moduleUid}:`, error);
+this.writeToLog(logPath, `Process error: ${error.message}`, true);
+this.emit("error", moduleUid, error);
+});
+}
+async handleProcessExit(moduleUid, code, signal) {
+const state = this.servers.get(moduleUid);
+if (!state) {
+return;
+}
+const exitReason = signal ? `signal ${signal}` : `code ${code}`;
+console.log(`[DevServerRunner] Process for ${moduleUid} exited with ${exitReason}`);
+this.writeToLog(state.logFile, `Process exited with ${exitReason}`, true);
+if (!state.autoRestartEnabled) {
+this.servers.delete(moduleUid);
+return;
+}
+if (state.restartCount >= DEV_SERVER_CONSTANTS.MAX_RESTART_ATTEMPTS) {
+console.error(`[DevServerRunner] Max restart attempts reached for ${moduleUid}`);
+this.writeToLog(state.logFile, "Max restart attempts reached", true);
+this.servers.delete(moduleUid);
+return;
+}
+const delay = this.calculateRestartDelay(state.restartCount);
+console.log(`[DevServerRunner] Restarting ${moduleUid} in ${delay}ms (attempt ${state.restartCount + 1})`);
+await this.wait(delay);
+if (!this.servers.has(moduleUid)) {
+return;
+}
+const logPath = state.logFile || this.getLogFilePath(moduleUid);
+const newProcess = this.spawnProcess(
+state.projectPath,
+state.port,
+moduleUid,
+logPath,
+state.customCommand,
+state.injectedEnvVars
+);
+state.process = newProcess;
+state.restartCount++;
+state.lastRestartAt = /* @__PURE__ */ new Date();
+this.setupProcessHandlers(moduleUid, newProcess, logPath);
+const ready = await this.waitForPort(state.port, DEV_SERVER_CONSTANTS.STARTUP_TIMEOUT_MS);
+if (ready) {
+console.log(`[DevServerRunner] Server ${moduleUid} restarted successfully`);
+state.restartCount = 0;
+this.emit("restarted", moduleUid, state.restartCount);
+} else {
+console.error(`[DevServerRunner] Server ${moduleUid} failed to restart after attempt ${state.restartCount}`);
+this.writeToLog(logPath, `Failed to restart (attempt ${state.restartCount})`, true);
+if (state.restartCount >= DEV_SERVER_CONSTANTS.MAX_RESTART_ATTEMPTS) {
+console.error(`[DevServerRunner] Max restart attempts reached for ${moduleUid}, cleaning up`);
+this.writeToLog(logPath, "Max restart attempts reached, giving up", true);
+this.servers.delete(moduleUid);
+this.emit("permanent_failure", moduleUid, new Error("Max restart attempts reached"));
+}
+}
+}
+calculateRestartDelay(restartCount) {
+const delay = DEV_SERVER_CONSTANTS.INITIAL_RESTART_DELAY_MS * Math.pow(2, restartCount);
+return Math.min(delay, DEV_SERVER_CONSTANTS.MAX_RESTART_DELAY_MS);
+}
+async checkHealth(port) {
+return new Promise((resolve3) => {
+const req = http.request(
+{
+hostname: "localhost",
+port,
+path: "/",
+method: "HEAD",
+timeout: DEV_SERVER_CONSTANTS.HEALTH_CHECK_TIMEOUT_MS
+},
+() => resolve3(true)
+);
+req.on("error", () => resolve3(false));
+req.on("timeout", () => {
+req.destroy();
+resolve3(false);
+});
+req.end();
+});
+}
+async waitForPort(port, timeoutMs) {
+const startTime = Date.now();
+const checkInterval = 500;
+while (Date.now() - startTime < timeoutMs) {
+if (await isPortInUse(port)) {
+return true;
+}
+await this.wait(checkInterval);
+}
+return false;
+}
+wait(ms) {
+return new Promise((resolve3) => setTimeout(resolve3, ms));
+}
+getLogsDir() {
+const logsDir = path12.join((0, import_core7.getConfigDir)(), "logs");
+if (!fs11.existsSync(logsDir)) {
+fs11.mkdirSync(logsDir, { recursive: true });
+}
+return logsDir;
+}
+getLogFilePath(moduleUid) {
+return path12.join(this.getLogsDir(), `dev-${moduleUid}.log`);
+}
+rotateLogIfNeeded(logPath) {
+try {
+if (fs11.existsSync(logPath)) {
+const stats = fs11.statSync(logPath);
+if (stats.size > DEV_SERVER_CONSTANTS.MAX_LOG_SIZE_BYTES) {
+const backupPath = `${logPath}.1`;
+if (fs11.existsSync(backupPath)) {
+fs11.unlinkSync(backupPath);
+}
+fs11.renameSync(logPath, backupPath);
+}
+}
+} catch {
+}
+}
+writeToLog(logPath, line, isError = false) {
+if (!logPath) return;
+try {
+const timestamp = (/* @__PURE__ */ new Date()).toISOString();
+const prefix = isError ? "ERR" : "OUT";
+fs11.appendFileSync(logPath, `[${timestamp}] [${prefix}] ${line}
+`);
+} catch {
+}
+}
+stateToStatus(state) {
+return {
+moduleUid: state.moduleUid,
+port: state.port,
+pid: state.process.pid,
+startedAt: state.startedAt,
+uptime: Math.floor((Date.now() - state.startedAt.getTime()) / 1e3),
+restartCount: state.restartCount,
+lastRestartAt: state.lastRestartAt,
+autoRestartEnabled: state.autoRestartEnabled,
+logFile: state.logFile
+};
+}
+};
+var instance2 = null;
+function getDevServerRunner() {
+if (!instance2) {
+instance2 = new DevServerRunner();
+}
+return instance2;
+}
+
+// src/preview/preview-manager.ts
+var DEFAULT_PORT = 3e3;
+var PreviewManager = class extends import_events3.EventEmitter {
+constructor() {
+super();
+this.previews = /* @__PURE__ */ new Map();
+this.startingModules = /* @__PURE__ */ new Set();
+// Prevents concurrent startPreview() race conditions
+this.initialized = false;
+this.devServer = getDevServerRunner();
+this.tunnel = getTunnelManager();
+this.setupEventForwarding();
+}
+/**
+* Initialize the preview manager
+*
+* Must be called before starting any previews.
+* Initializes the tunnel manager (ensures cloudflared, cleans orphans).
+*/
+async initialize() {
+if (this.initialized) {
+return;
+}
+console.log("[PreviewManager] Initializing...");
+await this.tunnel.initialize();
+this.initialized = true;
+console.log("[PreviewManager] Initialized");
+}
+/**
+* Start a preview for a module
+*
+* This will:
+* 1. Start the dev server in the worktree
+* 2. Provision a Named Tunnel via the platform API
+* 3. Connect the tunnel to the dev server
+* 4. Return the preview URL
+*
+* @param config - Preview configuration
+* @returns Result with success status and preview URL
+*/
+async startPreview(config) {
+const { moduleUid, worktreePath, port = DEFAULT_PORT, customCommand } = config;
+if (!worktreePath) {
+return { success: false, error: "Worktree path is required" };
+}
+if (!(0, import_fs.existsSync)(worktreePath)) {
+console.error(`[PreviewManager] Worktree path does not exist: ${worktreePath}`);
+return { success: false, error: `Worktree path does not exist: ${worktreePath}` };
+}
+try {
+const stats = (0, import_fs.statSync)(worktreePath);
+if (!stats.isDirectory()) {
+console.error(`[PreviewManager] Worktree path is not a directory: ${worktreePath}`);
+return { success: false, error: `Worktree path is not a directory: ${worktreePath}` };
+}
+} catch (error) {
+console.error(`[PreviewManager] Cannot access worktree path: ${worktreePath}`, error);
+return { success: false, error: `Cannot access worktree path: ${worktreePath}` };
+}
+if (!this.initialized) {
+await this.initialize();
+}
+if (this.startingModules.has(moduleUid)) {
+console.log(`[PreviewManager] Preview startup already in progress for ${moduleUid}`);
+return { success: false, error: "Preview startup already in progress" };
+}
+const existing = this.previews.get(moduleUid);
+if (existing && (existing.state === "live" || existing.state === "running")) {
+console.log(`[PreviewManager] Preview already running for ${moduleUid}`);
+return {
+success: true,
+previewUrl: existing.tunnelUrl,
+alreadyRunning: true
+};
+}
+this.startingModules.add(moduleUid);
+console.log(`[PreviewManager] Starting preview for ${moduleUid} at ${worktreePath}:${port}`);
+const state = {
+moduleUid,
+worktreePath,
+port,
+state: "starting",
+startedAt: /* @__PURE__ */ new Date()
+};
+this.previews.set(moduleUid, state);
+this.emitStateChange(moduleUid, "starting");
+try {
+console.log(`[PreviewManager] Starting dev server for ${moduleUid}...`);
+const devResult = await this.devServer.start({
+projectPath: worktreePath,
+port,
+moduleUid,
+customCommand,
+autoRestart: true
+});
+if (!devResult.success) {
+state.state = "error";
+state.error = devResult.error || "Failed to start dev server";
+this.emitStateChange(moduleUid, "error");
+this.emit("error", moduleUid, new Error(state.error));
+return { success: false, error: state.error };
+}
+state.state = "running";
+this.emitStateChange(moduleUid, "running");
+console.log(`[PreviewManager] Dev server running on port ${port}`);
+console.log(`[PreviewManager] Starting Named Tunnel for ${moduleUid}...`);
+state.state = "tunneling";
+this.emitStateChange(moduleUid, "tunneling");
+const MAX_TUNNEL_RETRIES = 2;
+let tunnelResult = { success: false };
+let lastError = "";
+for (let attempt = 1; attempt <= MAX_TUNNEL_RETRIES; attempt++) {
+if (attempt > 1) {
+console.log(`[PreviewManager] Retrying tunnel for ${moduleUid} (attempt ${attempt}/${MAX_TUNNEL_RETRIES})...`);
+await new Promise((resolve3) => setTimeout(resolve3, 2e3));
+}
+tunnelResult = await this.tunnel.startTunnel({
+moduleUid,
+port,
+mode: "named",
+// Named Tunnels only
+onStatusChange: (status, error) => {
+console.log(`[PreviewManager] Tunnel status for ${moduleUid}: ${status}${error ? ` - ${error}` : ""}`);
+if (status === "error") {
+state.state = "error";
+state.error = error || "Tunnel error";
+this.emitStateChange(moduleUid, "error");
+} else if (status === "disconnected") {
+state.state = "running";
+state.tunnelUrl = void 0;
+this.emitStateChange(moduleUid, "running");
+} else if (status === "reconnecting") {
+state.state = "tunneling";
+this.emitStateChange(moduleUid, "tunneling");
+}
+},
+onUrl: (url) => {
+state.tunnelUrl = url;
+}
+});
+if (tunnelResult.success) {
+break;
+}
+lastError = tunnelResult.error || "Unknown tunnel error";
+console.warn(`[PreviewManager] Tunnel attempt ${attempt} failed for ${moduleUid}: ${lastError}`);
+}
+if (!tunnelResult.success) {
+console.error(`[PreviewManager] Tunnel failed after ${MAX_TUNNEL_RETRIES} attempts for ${moduleUid}, stopping dev server`);
+try {
+await this.devServer.stop(moduleUid);
+} catch (cleanupError) {
+console.warn(`[PreviewManager] Error cleaning up dev server after tunnel failure:`, cleanupError);
+}
+state.state = "error";
+state.error = lastError;
+this.previews.delete(moduleUid);
+this.emitStateChange(moduleUid, "error");
+return {
+success: false,
+error: `Tunnel failed after ${MAX_TUNNEL_RETRIES} attempts: ${lastError}`
+};
+}
+state.state = "live";
+state.tunnelUrl = tunnelResult.url;
+state.error = void 0;
+this.emitStateChange(moduleUid, "live");
+this.emit("live", moduleUid, tunnelResult.url);
+console.log(`[PreviewManager] Preview live for ${moduleUid}: ${tunnelResult.url}`);
+return {
+success: true,
+previewUrl: tunnelResult.url
+};
+} catch (error) {
+const errorMsg = error instanceof Error ? error.message : String(error);
+console.error(`[PreviewManager] Error starting preview for ${moduleUid}:`, error);
+state.state = "error";
+state.error = errorMsg;
+this.emitStateChange(moduleUid, "error");
+this.emit("error", moduleUid, error instanceof Error ? error : new Error(errorMsg));
+return { success: false, error: errorMsg };
+} finally {
+this.startingModules.delete(moduleUid);
+}
+}
+/**
+* Stop a preview for a module
+*
+* This will:
+* 1. Stop the tunnel
+* 2. Stop the dev server
+* 3. Clear the tunnel URL from the platform API
+*
+* @param moduleUid - Module identifier
+*/
+async stopPreview(moduleUid) {
+console.log(`[PreviewManager] Stopping preview for ${moduleUid}`);
+const state = this.previews.get(moduleUid);
+try {
+await this.tunnel.stopTunnel(moduleUid);
+} catch (error) {
+console.warn(`[PreviewManager] Error stopping tunnel for ${moduleUid}:`, error);
+}
+try {
+await this.devServer.stop(moduleUid);
+} catch (error) {
+console.warn(`[PreviewManager] Error stopping dev server for ${moduleUid}:`, error);
+}
+try {
+await clearTunnelUrl(moduleUid);
+} catch (error) {
+console.warn(`[PreviewManager] Error clearing tunnel URL for ${moduleUid}:`, error);
+}
+if (state) {
+state.state = "stopped";
+state.tunnelUrl = void 0;
+}
+this.previews.delete(moduleUid);
+this.emitStateChange(moduleUid, "stopped");
+this.emit("stopped", moduleUid);
+console.log(`[PreviewManager] Preview stopped for ${moduleUid}`);
+}
+/**
+* Restart a preview for a module
+*
+* @param moduleUid - Module identifier
+* @returns Result with success status and new preview URL
+*/
+async restartPreview(moduleUid) {
+const state = this.previews.get(moduleUid);
+if (!state) {
+return { success: false, error: `No preview found for ${moduleUid}` };
+}
+console.log(`[PreviewManager] Restarting preview for ${moduleUid}`);
+await this.stopPreview(moduleUid);
+await new Promise((resolve3) => setTimeout(resolve3, 1e3));
+return this.startPreview({
+moduleUid,
+worktreePath: state.worktreePath,
+port: state.port
+});
+}
+/**
+* Get the status of a preview
+*
+* @param moduleUid - Module identifier
+* @returns Preview status or undefined if not found
+*/
+getStatus(moduleUid) {
+const state = this.previews.get(moduleUid);
+if (!state) {
+return void 0;
+}
+const devServerStatus = this.devServer.getStatus(moduleUid);
+const tunnelInfo = this.tunnel.getTunnel(moduleUid);
+return {
+moduleUid: state.moduleUid,
+state: state.state,
+devServer: devServerStatus,
+tunnelUrl: state.tunnelUrl,
+tunnelState: tunnelInfo?.status === "connected" ? "connected" : tunnelInfo?.status === "starting" ? "starting" : tunnelInfo?.status === "error" ? "error" : tunnelInfo?.status === "disconnected" ? "disconnected" : void 0,
+port: state.port,
+error: state.error,
+startedAt: state.startedAt
+};
+}
+/**
+* Get the status of all previews
+*
+* @returns Array of preview statuses
+*/
+getAllStatus() {
+return Array.from(this.previews.keys()).map((uid) => this.getStatus(uid));
+}
+/**
+* Check if a preview is running
+*
+* @param moduleUid - Module identifier
+* @returns True if preview is running (dev server active)
+*/
+isRunning(moduleUid) {
+const state = this.previews.get(moduleUid);
+return !!state && (state.state === "running" || state.state === "live" || state.state === "tunneling");
+}
+/**
+* Check if a preview is fully live (dev server + tunnel)
+*
+* @param moduleUid - Module identifier
+* @returns True if preview is fully live with tunnel connected
+*/
+isLive(moduleUid) {
+const state = this.previews.get(moduleUid);
+return !!state && state.state === "live";
+}
+/**
+* Get the preview URL for a module
+*
+* @param moduleUid - Module identifier
+* @returns Preview URL or undefined if not available
+*/
+getPreviewUrl(moduleUid) {
+return this.previews.get(moduleUid)?.tunnelUrl;
+}
+/**
+* Stop all previews
+*/
+async stopAll() {
+console.log("[PreviewManager] Stopping all previews...");
+const moduleUids = Array.from(this.previews.keys());
+await Promise.all(moduleUids.map((uid) => this.stopPreview(uid)));
+console.log("[PreviewManager] All previews stopped");
+}
+/**
+* Get all module UIDs with active previews
+*/
+getActiveModuleUids() {
+return Array.from(this.previews.keys()).filter((uid) => this.isRunning(uid));
+}
+// ============ Private Methods ============
+setupEventForwarding() {
+this.devServer.on("started", (moduleUid, port) => {
+console.log(`[PreviewManager] Dev server started: ${moduleUid} on port ${port}`);
+});
+this.devServer.on("stopped", (moduleUid) => {
+console.log(`[PreviewManager] Dev server stopped: ${moduleUid}`);
+const state = this.previews.get(moduleUid);
+if (state && state.state !== "stopped") {
+state.state = "error";
+state.error = "Dev server stopped unexpectedly";
+this.emitStateChange(moduleUid, "error");
+}
+});
+this.devServer.on("error", (moduleUid, error) => {
+console.error(`[PreviewManager] Dev server error: ${moduleUid}`, error);
+const state = this.previews.get(moduleUid);
+if (state) {
+state.state = "error";
+state.error = error.message;
+this.emitStateChange(moduleUid, "error");
+}
+});
+this.devServer.on("permanent_failure", (moduleUid, error) => {
+console.error(`[PreviewManager] Dev server permanent failure: ${moduleUid}`, error);
+const state = this.previews.get(moduleUid);
+if (state) {
+state.state = "error";
+state.error = `Dev server failed permanently: ${error.message}`;
+this.emitStateChange(moduleUid, "error");
+this.emit("error", moduleUid, error);
+this.tunnel.stopTunnel(moduleUid).catch(() => {
+});
+this.previews.delete(moduleUid);
+}
+});
+this.tunnel.on("tunnel", (event) => {
+const moduleUid = event.moduleUid;
+const state = this.previews.get(moduleUid);
+if (!state) return;
+if (event.type === "started") {
+state.tunnelUrl = event.url;
+state.state = "live";
+this.emitStateChange(moduleUid, "live");
+this.emit("live", moduleUid, event.url);
+} else if (event.type === "stopped") {
+state.tunnelUrl = void 0;
+if (state.state === "live") {
+state.state = "running";
+this.emitStateChange(moduleUid, "running");
+}
+} else if (event.type === "error") {
+console.error(`[PreviewManager] Tunnel error for ${moduleUid}:`, event.error);
+} else if (event.type === "reconnecting") {
+state.state = "tunneling";
+this.emitStateChange(moduleUid, "tunneling");
+}
+});
+}
+emitStateChange(moduleUid, state) {
+this.emit("stateChange", moduleUid, state);
+}
+};
+var instance3 = null;
+function getPreviewManager() {
+if (!instance3) {
+instance3 = new PreviewManager();
+}
+return instance3;
+}
+
 // src/utils/dev-server.ts
-var
-var
-var
+var import_child_process10 = require("child_process");
+var import_core8 = __toESM(require_dist());
+var fs12 = __toESM(require("fs"));
+var path13 = __toESM(require("path"));
 var MAX_RESTART_ATTEMPTS = 5;
 var INITIAL_RESTART_DELAY_MS = 2e3;
 var MAX_RESTART_DELAY_MS = 3e4;
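For orientation, a minimal TypeScript sketch of how the PreviewManager API introduced in the hunk above might be driven. Only the names that appear in the diff are real (getPreviewManager, initialize, startPreview, stopPreview, getStatus, and the config fields moduleUid, worktreePath, port, customCommand); the surrounding wiring, values, and import context are illustrative assumptions, not part of the package.

// Sketch only -- assumes this runs inside an async function in the daemon
// and that the preview module is in scope; neither is shown in the diff.
const previews = getPreviewManager();
await previews.initialize(); // per the diff: ensures cloudflared and cleans orphaned tunnels

const result = await previews.startPreview({
  moduleUid: "mod_abc123",            // hypothetical module identifier
  worktreePath: "/tmp/worktrees/mod", // hypothetical worktree path
  port: 3000,
  customCommand: "npm run dev"
});

if (result.success) {
  console.log("preview live at", result.previewUrl, result.alreadyRunning ? "(already running)" : "");
  console.log(previews.getStatus("mod_abc123")); // state, devServer, tunnelUrl, port, error
} else {
  console.error("preview failed:", result.error);
  await previews.stopPreview("mod_abc123");
}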
@@ -5542,26 +6343,26 @@ var MAX_LOG_SIZE_BYTES = 5 * 1024 * 1024;
 var NODE_MEMORY_LIMIT_MB = 2048;
 var activeServers = /* @__PURE__ */ new Map();
 function getLogsDir() {
-const logsDir =
-if (!
-
+const logsDir = path13.join((0, import_core8.getConfigDir)(), "logs");
+if (!fs12.existsSync(logsDir)) {
+fs12.mkdirSync(logsDir, { recursive: true });
 }
 return logsDir;
 }
 function getLogFilePath(moduleUid) {
-return
+return path13.join(getLogsDir(), `dev-${moduleUid}.log`);
 }
 function rotateLogIfNeeded(logPath) {
 try {
-if (
-const stats =
+if (fs12.existsSync(logPath)) {
+const stats = fs12.statSync(logPath);
 if (stats.size > MAX_LOG_SIZE_BYTES) {
 const backupPath = `${logPath}.1`;
-if (
-
+if (fs12.existsSync(backupPath)) {
+fs12.unlinkSync(backupPath);
 }
-
-console.log(`[DevServer] EP932: Rotated log file for ${
+fs12.renameSync(logPath, backupPath);
+console.log(`[DevServer] EP932: Rotated log file for ${path13.basename(logPath)}`);
 }
 }
 } catch (error) {
@@ -5574,37 +6375,13 @@ function writeToLog(logPath, line, isError = false) {
 const prefix = isError ? "ERR" : "OUT";
 const logLine = `[${timestamp}] [${prefix}] ${line}
 `;
-
+fs12.appendFileSync(logPath, logLine);
 } catch {
 }
 }
-async function isDevServerHealthy(port, timeoutMs = 5e3) {
-return new Promise((resolve3) => {
-const req = import_http.default.request(
-{
-hostname: "localhost",
-port,
-path: "/",
-method: "HEAD",
-timeout: timeoutMs
-},
-(res) => {
-resolve3(true);
-}
-);
-req.on("error", () => {
-resolve3(false);
-});
-req.on("timeout", () => {
-req.destroy();
-resolve3(false);
-});
-req.end();
-});
-}
 async function killProcessOnPort(port) {
 try {
-const result = (0,
+const result = (0, import_child_process10.execSync)(`lsof -ti:${port} 2>/dev/null || true`, { encoding: "utf8" }).trim();
 if (!result) {
 console.log(`[DevServer] EP929: No process found on port ${port}`);
 return true;
@@ -5613,7 +6390,7 @@ async function killProcessOnPort(port) {
 console.log(`[DevServer] EP929: Found ${pids.length} process(es) on port ${port}: ${pids.join(", ")}`);
 for (const pid of pids) {
 try {
-(0,
+(0, import_child_process10.execSync)(`kill -15 ${pid} 2>/dev/null || true`, { encoding: "utf8" });
 console.log(`[DevServer] EP929: Sent SIGTERM to PID ${pid}`);
 } catch {
 }
@@ -5621,8 +6398,8 @@ async function killProcessOnPort(port) {
 await new Promise((resolve3) => setTimeout(resolve3, 1e3));
 for (const pid of pids) {
 try {
-(0,
-(0,
+(0, import_child_process10.execSync)(`kill -0 ${pid} 2>/dev/null`, { encoding: "utf8" });
+(0, import_child_process10.execSync)(`kill -9 ${pid} 2>/dev/null || true`, { encoding: "utf8" });
 console.log(`[DevServer] EP929: Force killed PID ${pid}`);
 } catch {
 }
@@ -5673,7 +6450,7 @@ function spawnDevServerProcess(projectPath, port, moduleUid, logPath, customComm
 if (injectedCount > 0) {
 console.log(`[DevServer] EP998: Injecting ${injectedCount} env vars from database`);
 }
-const devProcess = (0,
+const devProcess = (0, import_child_process10.spawn)(cmd, args, {
 cwd: projectPath,
 env: mergedEnv,
 stdio: ["ignore", "pipe", "pipe"],
@@ -5767,7 +6544,7 @@ async function startDevServer(projectPath, port = 3e3, moduleUid = "default", op
 console.log(`[DevServer] EP932: Starting dev server for ${moduleUid} on port ${port} (auto-restart: ${autoRestart})...`);
 let injectedEnvVars = {};
 try {
-const config = await (0,
+const config = await (0, import_core8.loadConfig)();
 if (config?.access_token && config?.project_id) {
 const apiUrl = config.api_url || "https://episoda.dev";
 const result = await fetchEnvVarsWithCache(apiUrl, config.access_token, {
@@ -5777,8 +6554,8 @@ async function startDevServer(projectPath, port = 3e3, moduleUid = "default", op
 });
 injectedEnvVars = result.envVars;
 console.log(`[DevServer] EP998: Loaded ${Object.keys(injectedEnvVars).length} env vars (from ${result.fromCache ? "cache" : "server"})`);
-const envFilePath =
-if (!
+const envFilePath = path13.join(projectPath, ".env");
+if (!fs12.existsSync(envFilePath) && Object.keys(injectedEnvVars).length > 0) {
 console.log(`[DevServer] EP1004: .env file missing, writing ${Object.keys(injectedEnvVars).length} vars to ${envFilePath}`);
 writeEnvFile(projectPath, injectedEnvVars);
 }
@@ -5882,17 +6659,11 @@ function getDevServerStatus() {
 logFile: info.logFile
 }));
 }
-async function ensureDevServer(projectPath, port = 3e3, moduleUid = "default", customCommand) {
-if (await isPortInUse(port)) {
-return { success: true };
-}
-return startDevServer(projectPath, port, moduleUid, { autoRestart: true, customCommand });
-}

 // src/utils/port-detect.ts
-var
-var
-var
+var fs13 = __toESM(require("fs"));
+var path14 = __toESM(require("path"));
+var DEFAULT_PORT2 = 3e3;
 function detectDevPort(projectPath) {
 const envPort = getPortFromEnv(projectPath);
 if (envPort) {
@@ -5904,20 +6675,20 @@ function detectDevPort(projectPath) {
|
|
|
5904
6675
|
console.log(`[PortDetect] Found port ${scriptPort} in package.json dev script`);
|
|
5905
6676
|
return scriptPort;
|
|
5906
6677
|
}
|
|
5907
|
-
console.log(`[PortDetect] Using default port ${
|
|
5908
|
-
return
|
|
6678
|
+
console.log(`[PortDetect] Using default port ${DEFAULT_PORT2}`);
|
|
6679
|
+
return DEFAULT_PORT2;
|
|
5909
6680
|
}
|
|
5910
6681
|
function getPortFromEnv(projectPath) {
|
|
5911
6682
|
const envPaths = [
|
|
5912
|
-
|
|
5913
|
-
|
|
5914
|
-
|
|
5915
|
-
|
|
6683
|
+
path14.join(projectPath, ".env"),
|
|
6684
|
+
path14.join(projectPath, ".env.local"),
|
|
6685
|
+
path14.join(projectPath, ".env.development"),
|
|
6686
|
+
path14.join(projectPath, ".env.development.local")
|
|
5916
6687
|
];
|
|
5917
6688
|
for (const envPath of envPaths) {
|
|
5918
6689
|
try {
|
|
5919
|
-
if (!
|
|
5920
|
-
const content =
|
|
6690
|
+
if (!fs13.existsSync(envPath)) continue;
|
|
6691
|
+
const content = fs13.readFileSync(envPath, "utf-8");
|
|
5921
6692
|
const lines = content.split("\n");
|
|
5922
6693
|
for (const line of lines) {
|
|
5923
6694
|
const match = line.match(/^\s*PORT\s*=\s*["']?(\d+)["']?\s*(?:#.*)?$/);
|
|
@@ -5934,10 +6705,10 @@ function getPortFromEnv(projectPath) {
|
|
|
5934
6705
|
return null;
|
|
5935
6706
|
}
|
|
5936
6707
|
function getPortFromPackageJson(projectPath) {
|
|
5937
|
-
const packageJsonPath =
|
|
6708
|
+
const packageJsonPath = path14.join(projectPath, "package.json");
|
|
5938
6709
|
try {
|
|
5939
|
-
if (!
|
|
5940
|
-
const content =
|
|
6710
|
+
if (!fs13.existsSync(packageJsonPath)) return null;
|
|
6711
|
+
const content = fs13.readFileSync(packageJsonPath, "utf-8");
|
|
5941
6712
|
const pkg = JSON.parse(content);
|
|
5942
6713
|
const devScript = pkg.scripts?.dev;
|
|
5943
6714
|
if (!devScript) return null;
|
|
@@ -5961,9 +6732,9 @@ function getPortFromPackageJson(projectPath) {
|
|
|
5961
6732
|
}
|
|
5962
6733
|
|
|
5963
6734
|
// src/daemon/worktree-manager.ts
|
|
5964
|
-
var
|
|
5965
|
-
var
|
|
5966
|
-
var
|
|
6735
|
+
var fs14 = __toESM(require("fs"));
|
|
6736
|
+
var path15 = __toESM(require("path"));
|
|
6737
|
+
var import_core9 = __toESM(require_dist());
|
|
5967
6738
|
function validateModuleUid(moduleUid) {
|
|
5968
6739
|
if (!moduleUid || typeof moduleUid !== "string" || !moduleUid.trim()) {
|
|
5969
6740
|
return false;
|
|
@@ -5986,9 +6757,9 @@ var WorktreeManager = class _WorktreeManager {
 // ============================================================
 this.lockPath = "";
 this.projectRoot = projectRoot;
- this.bareRepoPath =
- this.configPath =
- this.gitExecutor = new
+ this.bareRepoPath = path15.join(projectRoot, ".bare");
+ this.configPath = path15.join(projectRoot, ".episoda", "config.json");
+ this.gitExecutor = new import_core9.GitExecutor();
 }
 /**
 * Initialize worktree manager from existing project root
@@ -5996,10 +6767,10 @@ var WorktreeManager = class _WorktreeManager {
 * @returns true if valid project, false otherwise
 */
 async initialize() {
- if (!
+ if (!fs14.existsSync(this.bareRepoPath)) {
 return false;
 }
- if (!
+ if (!fs14.existsSync(this.configPath)) {
 return false;
 }
 try {
@@ -6019,10 +6790,10 @@ var WorktreeManager = class _WorktreeManager {
 */
 async ensureFetchRefspecConfigured() {
 try {
- const { execSync:
+ const { execSync: execSync8 } = require("child_process");
 let fetchRefspec = null;
 try {
- fetchRefspec =
+ fetchRefspec = execSync8("git config --get remote.origin.fetch", {
 cwd: this.bareRepoPath,
 encoding: "utf-8",
 timeout: 5e3
@@ -6031,7 +6802,7 @@ var WorktreeManager = class _WorktreeManager {
 }
 if (!fetchRefspec) {
 console.log("[WorktreeManager] EP1014: Configuring missing fetch refspec for bare repo");
-
+ execSync8('git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*"', {
 cwd: this.bareRepoPath,
 timeout: 5e3
 });
@@ -6046,8 +6817,8 @@ var WorktreeManager = class _WorktreeManager {
 */
 static async createProject(projectRoot, repoUrl, projectId, workspaceSlug, projectSlug) {
 const manager = new _WorktreeManager(projectRoot);
- const episodaDir =
-
+ const episodaDir = path15.join(projectRoot, ".episoda");
+ fs14.mkdirSync(episodaDir, { recursive: true });
 const cloneResult = await manager.gitExecutor.execute({
 action: "clone_bare",
 url: repoUrl,
@@ -6078,7 +6849,7 @@ var WorktreeManager = class _WorktreeManager {
 error: `Invalid module UID: "${moduleUid}" - contains disallowed characters`
 };
 }
- const worktreePath =
+ const worktreePath = path15.join(this.projectRoot, moduleUid);
 const lockAcquired = await this.acquireLock();
 if (!lockAcquired) {
 return {
@@ -6260,7 +7031,7 @@ var WorktreeManager = class _WorktreeManager {
 let prunedCount = 0;
 await this.updateConfigSafe((config) => {
 const initialCount = config.worktrees.length;
- config.worktrees = config.worktrees.filter((w) =>
+ config.worktrees = config.worktrees.filter((w) => fs14.existsSync(w.worktreePath));
 prunedCount = initialCount - config.worktrees.length;
 return config;
 });
@@ -6341,16 +7112,16 @@ var WorktreeManager = class _WorktreeManager {
 const retryInterval = 50;
 while (Date.now() - startTime < timeoutMs) {
 try {
-
+ fs14.writeFileSync(lockPath, String(process.pid), { flag: "wx" });
 return true;
 } catch (err) {
 if (err.code === "EEXIST") {
 try {
- const stats =
+ const stats = fs14.statSync(lockPath);
 const lockAge = Date.now() - stats.mtimeMs;
 if (lockAge > 3e4) {
 try {
- const lockContent =
+ const lockContent = fs14.readFileSync(lockPath, "utf-8").trim();
 const lockPid = parseInt(lockContent, 10);
 if (!isNaN(lockPid) && this.isProcessRunning(lockPid)) {
 await new Promise((resolve3) => setTimeout(resolve3, retryInterval));
@@ -6359,7 +7130,7 @@ var WorktreeManager = class _WorktreeManager {
 } catch {
 }
 try {
-
+ fs14.unlinkSync(lockPath);
 } catch {
 }
 continue;
@@ -6380,16 +7151,16 @@ var WorktreeManager = class _WorktreeManager {
 */
 releaseLock() {
 try {
-
+ fs14.unlinkSync(this.getLockPath());
 } catch {
 }
 }
 readConfig() {
 try {
- if (!
+ if (!fs14.existsSync(this.configPath)) {
 return null;
 }
- const content =
+ const content = fs14.readFileSync(this.configPath, "utf-8");
 return JSON.parse(content);
 } catch (error) {
 console.error("[WorktreeManager] Failed to read config:", error);
@@ -6398,11 +7169,11 @@ var WorktreeManager = class _WorktreeManager {
 }
 writeConfig(config) {
 try {
- const dir =
- if (!
-
+ const dir = path15.dirname(this.configPath);
+ if (!fs14.existsSync(dir)) {
+ fs14.mkdirSync(dir, { recursive: true });
 }
-
+ fs14.writeFileSync(this.configPath, JSON.stringify(config, null, 2), "utf-8");
 } catch (error) {
 console.error("[WorktreeManager] Failed to write config:", error);
 throw error;
@@ -6483,14 +7254,14 @@ var WorktreeManager = class _WorktreeManager {
 }
 try {
 for (const file of files) {
- const srcPath =
- const destPath =
- if (
- const destDir =
- if (!
-
- }
-
+ const srcPath = path15.join(mainWorktree.worktreePath, file);
+ const destPath = path15.join(worktree.worktreePath, file);
+ if (fs14.existsSync(srcPath)) {
+ const destDir = path15.dirname(destPath);
+ if (!fs14.existsSync(destDir)) {
+ fs14.mkdirSync(destDir, { recursive: true });
+ }
+ fs14.copyFileSync(srcPath, destPath);
 console.log(`[WorktreeManager] EP964: Copied ${file} to ${moduleUid} (deprecated)`);
 } else {
 console.log(`[WorktreeManager] EP964: Skipped ${file} (not found in main)`);
@@ -6521,8 +7292,8 @@ var WorktreeManager = class _WorktreeManager {
 console.log(`[WorktreeManager] EP959: Timeout: ${TIMEOUT_MINUTES} minutes`);
 console.log(`[WorktreeManager] EP959: Script: ${scriptPreview}`);
 try {
- const { execSync:
-
+ const { execSync: execSync8 } = require("child_process");
+ execSync8(script, {
 cwd: worktree.worktreePath,
 stdio: "inherit",
 timeout: TIMEOUT_MINUTES * 60 * 1e3,
@@ -6556,8 +7327,8 @@ var WorktreeManager = class _WorktreeManager {
 console.log(`[WorktreeManager] EP959: Timeout: ${TIMEOUT_MINUTES} minutes`);
 console.log(`[WorktreeManager] EP959: Script: ${scriptPreview}`);
 try {
- const { execSync:
-
+ const { execSync: execSync8 } = require("child_process");
+ execSync8(script, {
 cwd: worktree.worktreePath,
 stdio: "inherit",
 timeout: TIMEOUT_MINUTES * 60 * 1e3,
@@ -6573,27 +7344,27 @@ var WorktreeManager = class _WorktreeManager {
 }
 };
 function getEpisodaRoot() {
- return process.env.EPISODA_ROOT ||
+ return process.env.EPISODA_ROOT || path15.join(require("os").homedir(), "episoda");
 }
 async function isWorktreeProject(projectRoot) {
 const manager = new WorktreeManager(projectRoot);
 return manager.initialize();
 }
 async function findProjectRoot(startPath) {
- let current =
+ let current = path15.resolve(startPath);
 const episodaRoot = getEpisodaRoot();
 if (!current.startsWith(episodaRoot)) {
 return null;
 }
 for (let i = 0; i < 10; i++) {
- const bareDir =
- const episodaDir =
- if (
+ const bareDir = path15.join(current, ".bare");
+ const episodaDir = path15.join(current, ".episoda");
+ if (fs14.existsSync(bareDir) && fs14.existsSync(episodaDir)) {
 if (await isWorktreeProject(current)) {
 return current;
 }
 }
- const parent =
+ const parent = path15.dirname(current);
 if (parent === current) {
 break;
 }
@@ -6603,24 +7374,24 @@ async function findProjectRoot(startPath) {
 }

 // src/utils/worktree.ts
- var
- var
+ var path16 = __toESM(require("path"));
+ var fs15 = __toESM(require("fs"));
 var os5 = __toESM(require("os"));
- var
+ var import_core10 = __toESM(require_dist());
 function getEpisodaRoot2() {
- return process.env.EPISODA_ROOT ||
+ return process.env.EPISODA_ROOT || path16.join(os5.homedir(), "episoda");
 }
 function getWorktreeInfo(moduleUid, workspaceSlug, projectSlug) {
 const root = getEpisodaRoot2();
- const worktreePath =
+ const worktreePath = path16.join(root, workspaceSlug, projectSlug, moduleUid);
 return {
 path: worktreePath,
- exists:
+ exists: fs15.existsSync(worktreePath),
 moduleUid
 };
 }
 async function getWorktreeInfoForModule(moduleUid) {
- const config = await (0,
+ const config = await (0, import_core10.loadConfig)();
 if (!config?.workspace_slug || !config?.project_slug) {
 console.warn("[Worktree] Missing workspace_slug or project_slug in config");
 return null;
@@ -6639,61 +7410,61 @@ function clearAllPorts() {
 }

 // src/framework-detector.ts
- var
- var
+ var fs16 = __toESM(require("fs"));
+ var path17 = __toESM(require("path"));
 function getInstallCommand(cwd) {
- if (
+ if (fs16.existsSync(path17.join(cwd, "bun.lockb"))) {
 return {
 command: ["bun", "install"],
 description: "Installing dependencies with bun",
 detectedFrom: "bun.lockb"
 };
 }
- if (
+ if (fs16.existsSync(path17.join(cwd, "pnpm-lock.yaml"))) {
 return {
 command: ["pnpm", "install"],
 description: "Installing dependencies with pnpm",
 detectedFrom: "pnpm-lock.yaml"
 };
 }
- if (
+ if (fs16.existsSync(path17.join(cwd, "yarn.lock"))) {
 return {
 command: ["yarn", "install"],
 description: "Installing dependencies with yarn",
 detectedFrom: "yarn.lock"
 };
 }
- if (
+ if (fs16.existsSync(path17.join(cwd, "package-lock.json"))) {
 return {
 command: ["npm", "ci"],
 description: "Installing dependencies with npm ci",
 detectedFrom: "package-lock.json"
 };
 }
- if (
+ if (fs16.existsSync(path17.join(cwd, "package.json"))) {
 return {
 command: ["npm", "install"],
 description: "Installing dependencies with npm",
 detectedFrom: "package.json"
 };
 }
- if (
+ if (fs16.existsSync(path17.join(cwd, "Pipfile.lock")) || fs16.existsSync(path17.join(cwd, "Pipfile"))) {
 return {
 command: ["pipenv", "install"],
 description: "Installing dependencies with pipenv",
- detectedFrom:
+ detectedFrom: fs16.existsSync(path17.join(cwd, "Pipfile.lock")) ? "Pipfile.lock" : "Pipfile"
 };
 }
- if (
+ if (fs16.existsSync(path17.join(cwd, "poetry.lock"))) {
 return {
 command: ["poetry", "install"],
 description: "Installing dependencies with poetry",
 detectedFrom: "poetry.lock"
 };
 }
- if (
- const pyprojectPath =
- const content =
+ if (fs16.existsSync(path17.join(cwd, "pyproject.toml"))) {
+ const pyprojectPath = path17.join(cwd, "pyproject.toml");
+ const content = fs16.readFileSync(pyprojectPath, "utf-8");
 if (content.includes("[tool.poetry]")) {
 return {
 command: ["poetry", "install"],
@@ -6702,41 +7473,41 @@ function getInstallCommand(cwd) {
 };
 }
 }
- if (
+ if (fs16.existsSync(path17.join(cwd, "requirements.txt"))) {
 return {
 command: ["pip", "install", "-r", "requirements.txt"],
 description: "Installing dependencies with pip",
 detectedFrom: "requirements.txt"
 };
 }
- if (
+ if (fs16.existsSync(path17.join(cwd, "Gemfile.lock")) || fs16.existsSync(path17.join(cwd, "Gemfile"))) {
 return {
 command: ["bundle", "install"],
 description: "Installing dependencies with bundler",
- detectedFrom:
+ detectedFrom: fs16.existsSync(path17.join(cwd, "Gemfile.lock")) ? "Gemfile.lock" : "Gemfile"
 };
 }
- if (
+ if (fs16.existsSync(path17.join(cwd, "go.sum")) || fs16.existsSync(path17.join(cwd, "go.mod"))) {
 return {
 command: ["go", "mod", "download"],
 description: "Downloading Go modules",
- detectedFrom:
+ detectedFrom: fs16.existsSync(path17.join(cwd, "go.sum")) ? "go.sum" : "go.mod"
 };
 }
- if (
+ if (fs16.existsSync(path17.join(cwd, "Cargo.lock")) || fs16.existsSync(path17.join(cwd, "Cargo.toml"))) {
 return {
 command: ["cargo", "build"],
 description: "Building Rust project (downloads dependencies)",
- detectedFrom:
+ detectedFrom: fs16.existsSync(path17.join(cwd, "Cargo.lock")) ? "Cargo.lock" : "Cargo.toml"
 };
 }
 return null;
 }

 // src/daemon/daemon-process.ts
- var
+ var fs17 = __toESM(require("fs"));
 var os6 = __toESM(require("os"));
- var
+ var path18 = __toESM(require("path"));
 var packageJson = require_package();
 async function ensureValidToken(config, bufferMs = 5 * 60 * 1e3) {
 const now = Date.now();
@@ -6771,7 +7542,7 @@ async function ensureValidToken(config, bufferMs = 5 * 60 * 1e3) {
 refresh_token: tokenResponse.refresh_token || config.refresh_token,
 expires_at: now + tokenResponse.expires_in * 1e3
 };
- await (0,
+ await (0, import_core11.saveConfig)(updatedConfig);
 console.log("[Daemon] EP904: Access token refreshed successfully");
 return updatedConfig;
 } catch (error) {
@@ -6780,7 +7551,7 @@ async function ensureValidToken(config, bufferMs = 5 * 60 * 1e3) {
 }
 }
 async function fetchWithAuth(url, options = {}, retryOnUnauthorized = true) {
- let config = await (0,
+ let config = await (0, import_core11.loadConfig)();
 if (!config?.access_token) {
 throw new Error("No access token configured");
 }
@@ -6807,7 +7578,7 @@ async function fetchWithAuth(url, options = {}, retryOnUnauthorized = true) {
 }
 async function fetchEnvVars2() {
 try {
- const config = await (0,
+ const config = await (0, import_core11.loadConfig)();
 if (!config?.project_id) {
 console.warn("[Daemon] EP973: No project_id in config, cannot fetch env vars");
 return {};
@@ -6892,7 +7663,7 @@ var Daemon = class _Daemon {
 console.log("[Daemon] Starting Episoda daemon...");
 this.machineId = await getMachineId();
 console.log(`[Daemon] Machine ID: ${this.machineId}`);
- const config = await (0,
+ const config = await (0, import_core11.loadConfig)();
 if (config?.device_id) {
 this.deviceId = config.device_id;
 console.log(`[Daemon] Loaded cached Device ID (UUID): ${this.deviceId}`);
@@ -7029,7 +7800,7 @@ var Daemon = class _Daemon {
 };
 });
 this.ipcServer.on("verify-server-connection", async () => {
- const config = await (0,
+ const config = await (0, import_core11.loadConfig)();
 if (!config?.access_token || !config?.api_url) {
 return {
 verified: false,
@@ -7203,7 +7974,7 @@ var Daemon = class _Daemon {
 console.warn(`[Daemon] Stale connection detected for ${projectPath}, forcing reconnection`);
 await this.disconnectProject(projectPath);
 }
- const config = await (0,
+ const config = await (0, import_core11.loadConfig)();
 if (!config || !config.access_token) {
 throw new Error("No access token found. Please run: episoda auth");
 }
@@ -7224,8 +7995,8 @@ var Daemon = class _Daemon {
 wsUrl = `${wsProtocol}//${wsHostname}:${wsPort}`;
 }
 console.log(`[Daemon] Connecting to ${wsUrl} for project ${projectId}...`);
- const client = new
- const gitExecutor = new
+ const client = new import_core11.EpisodaClient();
+ const gitExecutor = new import_core11.GitExecutor();
 const connection = {
 projectId,
 projectPath,
@@ -7240,7 +8011,7 @@ var Daemon = class _Daemon {
 client.updateActivity();
 try {
 const gitCmd = message.command;
- const bareRepoPath =
+ const bareRepoPath = path18.join(projectPath, ".bare");
 const cwd = gitCmd.worktreePath || bareRepoPath;
 if (gitCmd.worktreePath) {
 console.log(`[Daemon] Routing command to worktree: ${gitCmd.worktreePath}`);
@@ -7368,15 +8139,15 @@ var Daemon = class _Daemon {
 client.on("tunnel_command", async (message) => {
 if (message.type === "tunnel_command" && message.command) {
 const cmd = message.command;
- console.log(`[Daemon] Received tunnel command for ${projectId}:`, cmd.action);
+ console.log(`[Daemon] EP1024: Received tunnel command for ${projectId}:`, cmd.action);
 client.updateActivity();
 try {
- const
+ const previewManager = getPreviewManager();
 let result;
 if (cmd.action === "start") {
 const worktree = await getWorktreeInfoForModule(cmd.moduleUid);
 if (!worktree) {
- console.error(`[Daemon]
+ console.error(`[Daemon] EP1024: Cannot resolve worktree path for ${cmd.moduleUid}`);
 await client.send({
 type: "tunnel_result",
 commandId: message.id,
@@ -7385,7 +8156,7 @@ var Daemon = class _Daemon {
 return;
 }
 if (!worktree.exists) {
- console.error(`[Daemon]
+ console.error(`[Daemon] EP1024: Worktree not found at ${worktree.path}`);
 await client.send({
 type: "tunnel_result",
 commandId: message.id,
@@ -7393,118 +8164,31 @@ var Daemon = class _Daemon {
 });
 return;
 }
- console.log(`[Daemon]
+ console.log(`[Daemon] EP1024: Using worktree path ${worktree.path} for ${cmd.moduleUid}`);
 const port = cmd.port || detectDevPort(worktree.path);
- const
- const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- console.warn(`[Daemon] Error reporting tunnel status:`, reportError);
- }
- }
- };
- (async () => {
- const MAX_RETRIES = 3;
- const RETRY_DELAY_MS = 3e3;
- await reportTunnelStatus({
- tunnel_started_at: (/* @__PURE__ */ new Date()).toISOString(),
- tunnel_error: null
- // Clear any previous error
- });
- try {
- await tunnelManager.initialize();
- const devConfig = await (0, import_core10.loadConfig)();
- const devServerScript = devConfig?.project_settings?.worktree_dev_server_script;
- console.log(`[Daemon] EP973: Ensuring dev server is running in ${worktree.path} on port ${port}...`);
- const devServerResult = await ensureDevServer(worktree.path, port, cmd.moduleUid, devServerScript);
- if (!devServerResult.success) {
- const errorMsg2 = `Dev server failed to start: ${devServerResult.error}`;
- console.error(`[Daemon] ${errorMsg2}`);
- await reportTunnelStatus({ tunnel_error: errorMsg2 });
- return;
- }
- console.log(`[Daemon] Dev server ready on port ${port}`);
- let lastError;
- for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) {
- console.log(`[Daemon] Starting tunnel (attempt ${attempt}/${MAX_RETRIES})...`);
- const startResult = await tunnelManager.startTunnel({
- moduleUid: cmd.moduleUid,
- port,
- onUrl: async (url) => {
- console.log(`[Daemon] Tunnel URL for ${cmd.moduleUid}: ${url}`);
- await reportTunnelStatus({
- tunnel_url: url,
- tunnel_error: null
- // Clear error on success
- });
- },
- onStatusChange: (status, error) => {
- if (status === "error") {
- console.error(`[Daemon] Tunnel error for ${cmd.moduleUid}: ${error}`);
- reportTunnelStatus({ tunnel_error: error || "Tunnel connection error" });
- } else if (status === "reconnecting") {
- console.log(`[Daemon] Tunnel reconnecting for ${cmd.moduleUid}...`);
- }
- }
- });
- if (startResult.success) {
- console.log(`[Daemon] Tunnel started successfully for ${cmd.moduleUid}`);
- return;
- }
- lastError = startResult.error;
- console.warn(`[Daemon] Tunnel start attempt ${attempt} failed: ${lastError}`);
- if (attempt < MAX_RETRIES) {
- console.log(`[Daemon] Retrying in ${RETRY_DELAY_MS}ms...`);
- await new Promise((resolve3) => setTimeout(resolve3, RETRY_DELAY_MS));
- }
- }
- const errorMsg = `Tunnel failed after ${MAX_RETRIES} attempts: ${lastError}`;
- console.error(`[Daemon] ${errorMsg}`);
- await reportTunnelStatus({ tunnel_error: errorMsg });
- } catch (error) {
- const errorMsg = error instanceof Error ? error.message : String(error);
- console.error(`[Daemon] Async tunnel startup error:`, error);
- await reportTunnelStatus({ tunnel_error: `Unexpected error: ${errorMsg}` });
- }
- })();
- result = {
- success: true,
- previewUrl
- // Note: actual tunnel URL will be reported via API when ready
- };
- } else if (cmd.action === "stop") {
- await tunnelManager.stopTunnel(cmd.moduleUid);
- await stopDevServer(cmd.moduleUid);
- const config2 = await (0, import_core10.loadConfig)();
- if (config2?.access_token) {
- try {
- const apiUrl = config2.api_url || "https://episoda.dev";
- await fetch(`${apiUrl}/api/modules/${cmd.moduleUid}/tunnel`, {
- method: "DELETE",
- headers: {
- "Authorization": `Bearer ${config2.access_token}`
- }
- });
- console.log(`[Daemon] Tunnel URL cleared for ${cmd.moduleUid}`);
- } catch {
- }
+ const devConfig = await (0, import_core11.loadConfig)();
+ const customCommand = devConfig?.project_settings?.worktree_dev_server_script;
+ const startResult = await previewManager.startPreview({
+ moduleUid: cmd.moduleUid,
+ worktreePath: worktree.path,
+ port,
+ customCommand
+ });
+ if (startResult.success) {
+ console.log(`[Daemon] EP1024: Preview started for ${cmd.moduleUid}: ${startResult.previewUrl}`);
+ result = {
+ success: true,
+ previewUrl: startResult.previewUrl
+ };
+ } else {
+ console.error(`[Daemon] EP1024: Preview failed for ${cmd.moduleUid}: ${startResult.error}`);
+ result = {
+ success: false,
+ error: startResult.error || "Failed to start preview"
+ };
 }
+ } else if (cmd.action === "stop") {
+ await previewManager.stopPreview(cmd.moduleUid);
 result = { success: true };
 } else {
 result = {
@@ -7517,7 +8201,7 @@ var Daemon = class _Daemon {
 commandId: message.id,
 result
 });
- console.log(`[Daemon] Tunnel command ${cmd.action} completed for ${cmd.moduleUid}:`, result.success ? "success" : "failed");
+ console.log(`[Daemon] EP1024: Tunnel command ${cmd.action} completed for ${cmd.moduleUid}:`, result.success ? "success" : "failed");
 } catch (error) {
 await client.send({
 type: "tunnel_result",
@@ -7527,7 +8211,7 @@ var Daemon = class _Daemon {
 error: error instanceof Error ? error.message : String(error)
 }
 });
- console.error(`[Daemon] Tunnel command execution error:`, error);
+ console.error(`[Daemon] EP1024: Tunnel command execution error:`, error);
 }
 }
 });
@@ -7742,8 +8426,8 @@ var Daemon = class _Daemon {
 let daemonPid;
 try {
 const pidPath = getPidFilePath();
- if (
- const pidStr =
+ if (fs17.existsSync(pidPath)) {
+ const pidStr = fs17.readFileSync(pidPath, "utf-8").trim();
 daemonPid = parseInt(pidStr, 10);
 }
 } catch (pidError) {
@@ -7822,29 +8506,29 @@ var Daemon = class _Daemon {
 */
 async configureGitUser(projectPath, userId, workspaceId, machineId, projectId, deviceId) {
 try {
- const { execSync:
-
+ const { execSync: execSync8 } = await import("child_process");
+ execSync8(`git config episoda.userId ${userId}`, {
 cwd: projectPath,
 encoding: "utf8",
 stdio: "pipe"
 });
-
+ execSync8(`git config episoda.workspaceId ${workspaceId}`, {
 cwd: projectPath,
 encoding: "utf8",
 stdio: "pipe"
 });
-
+ execSync8(`git config episoda.machineId ${machineId}`, {
 cwd: projectPath,
 encoding: "utf8",
 stdio: "pipe"
 });
-
+ execSync8(`git config episoda.projectId ${projectId}`, {
 cwd: projectPath,
 encoding: "utf8",
 stdio: "pipe"
 });
 if (deviceId) {
-
+ execSync8(`git config episoda.deviceId ${deviceId}`, {
 cwd: projectPath,
 encoding: "utf8",
 stdio: "pipe"
@@ -7864,27 +8548,27 @@ var Daemon = class _Daemon {
 */
 async installGitHooks(projectPath) {
 const hooks = ["post-checkout", "pre-commit", "post-commit"];
- const hooksDir =
- if (!
+ const hooksDir = path18.join(projectPath, ".git", "hooks");
+ if (!fs17.existsSync(hooksDir)) {
 console.warn(`[Daemon] Hooks directory not found: ${hooksDir}`);
 return;
 }
 for (const hookName of hooks) {
 try {
- const hookPath =
- const bundledHookPath =
- if (!
+ const hookPath = path18.join(hooksDir, hookName);
+ const bundledHookPath = path18.join(__dirname, "..", "hooks", hookName);
+ if (!fs17.existsSync(bundledHookPath)) {
 console.warn(`[Daemon] Bundled hook not found: ${bundledHookPath}`);
 continue;
 }
- const hookContent =
- if (
- const existingContent =
+ const hookContent = fs17.readFileSync(bundledHookPath, "utf-8");
+ if (fs17.existsSync(hookPath)) {
+ const existingContent = fs17.readFileSync(hookPath, "utf-8");
 if (existingContent === hookContent) {
 continue;
 }
 }
-
+ fs17.writeFileSync(hookPath, hookContent, { mode: 493 });
 console.log(`[Daemon] Installed git hook: ${hookName}`);
 } catch (error) {
 console.warn(`[Daemon] Failed to install ${hookName} hook:`, error instanceof Error ? error.message : error);
@@ -7899,7 +8583,7 @@ var Daemon = class _Daemon {
 */
 async cacheDeviceId(deviceId) {
 try {
- const config = await (0,
+ const config = await (0, import_core11.loadConfig)();
 if (!config) {
 console.warn("[Daemon] Cannot cache device ID - no config found");
 return;
@@ -7912,7 +8596,7 @@ var Daemon = class _Daemon {
 device_id: deviceId,
 machine_id: this.machineId
 };
- await (0,
+ await (0, import_core11.saveConfig)(updatedConfig);
 console.log(`[Daemon] Cached device ID to config: ${deviceId}`);
 } catch (error) {
 console.warn("[Daemon] Failed to cache device ID:", error instanceof Error ? error.message : error);
@@ -7926,7 +8610,7 @@ var Daemon = class _Daemon {
 */
 async syncProjectSettings(projectId) {
 try {
- const config = await (0,
+ const config = await (0, import_core11.loadConfig)();
 if (!config) return;
 const apiUrl = config.api_url || "https://episoda.dev";
 const response = await fetchWithAuth(`${apiUrl}/api/projects/${projectId}/settings`);
@@ -7960,7 +8644,7 @@ var Daemon = class _Daemon {
 cached_at: Date.now()
 }
 };
- await (0,
+ await (0, import_core11.saveConfig)(updatedConfig);
 console.log(`[Daemon] EP973: Project settings synced (slugs: ${projectSlug}/${workspaceSlug})`);
 }
 } catch (error) {
@@ -7980,7 +8664,7 @@ var Daemon = class _Daemon {
 console.warn("[Daemon] EP995: Cannot sync project path - deviceId not available");
 return;
 }
- const config = await (0,
+ const config = await (0, import_core11.loadConfig)();
 if (!config) return;
 const apiUrl = config.api_url || "https://episoda.dev";
 const response = await fetchWithAuth(`${apiUrl}/api/account/machines/${this.deviceId}`, {
@@ -8013,7 +8697,7 @@ var Daemon = class _Daemon {
 */
 async updateModuleWorktreeStatus(moduleUid, status, worktreePath, errorMessage) {
 try {
- const config = await (0,
+ const config = await (0, import_core11.loadConfig)();
 if (!config) return;
 const apiUrl = config.api_url || "https://episoda.dev";
 const body = {
@@ -8068,7 +8752,7 @@ var Daemon = class _Daemon {
 console.log("[Daemon] EP1003: Cannot reconcile - deviceId not available yet");
 return;
 }
- const config = await (0,
+ const config = await (0, import_core11.loadConfig)();
 if (!config) return;
 const apiUrl = config.api_url || "https://episoda.dev";
 const controller = new AbortController();
@@ -8180,7 +8864,7 @@ var Daemon = class _Daemon {
 console.log(`[Daemon] EP994: No worktree to remove for ${moduleUid}`);
 }
 try {
- const cleanupConfig = await (0,
+ const cleanupConfig = await (0, import_core11.loadConfig)();
 const cleanupApiUrl = cleanupConfig?.api_url || "https://episoda.dev";
 await fetchWithAuth(`${cleanupApiUrl}/api/modules/${moduleUid}`, {
 method: "PATCH",
@@ -8212,7 +8896,7 @@ var Daemon = class _Daemon {
 try {
 const envVars = await fetchEnvVars2();
 console.log(`[Daemon] EP1002: Fetched ${Object.keys(envVars).length} env vars for ${moduleUid}`);
- const config = await (0,
+ const config = await (0, import_core11.loadConfig)();
 const setupConfig = config?.project_settings;
 await this.runWorktreeSetupSync(
 moduleUid,
@@ -8258,8 +8942,8 @@ var Daemon = class _Daemon {
 console.log(`[Daemon] EP1002: ${installCmd.description} (detected from ${installCmd.detectedFrom})`);
 console.log(`[Daemon] EP1002: Running: ${installCmd.command.join(" ")}`);
 try {
- const { execSync:
-
+ const { execSync: execSync8 } = await import("child_process");
+ execSync8(installCmd.command.join(" "), {
 cwd: worktreePath,
 stdio: "inherit",
 timeout: 10 * 60 * 1e3,
@@ -8312,8 +8996,8 @@ var Daemon = class _Daemon {
 console.log(`[Daemon] EP986: ${installCmd.description} (detected from ${installCmd.detectedFrom})`);
 console.log(`[Daemon] EP986: Running: ${installCmd.command.join(" ")}`);
 try {
- const { execSync:
-
+ const { execSync: execSync8 } = await import("child_process");
+ execSync8(installCmd.command.join(" "), {
 cwd: worktreePath,
 stdio: "inherit",
 timeout: 10 * 60 * 1e3,
@@ -8390,7 +9074,7 @@ var Daemon = class _Daemon {
 }
 this.healthCheckInProgress = true;
 try {
- const config = await (0,
+ const config = await (0, import_core11.loadConfig)();
 if (config?.access_token) {
 await this.performHealthChecks(config);
 }
@@ -8509,7 +9193,7 @@ var Daemon = class _Daemon {
 */
 async fetchActiveModuleUids(projectId) {
 try {
- const config = await (0,
+ const config = await (0, import_core11.loadConfig)();
 if (!config?.access_token || !config?.api_url) {
 return null;
 }
@@ -8609,84 +9293,76 @@ var Daemon = class _Daemon {
 }
 /**
 * EP833: Restart a failed tunnel
- *
+ * EP1024: Refactored to use PreviewManager for unified preview lifecycle
 */
 async restartTunnel(moduleUid, port) {
- const
+ const previewManager = getPreviewManager();
 try {
- await
- const config = await (0, import_core10.loadConfig)();
+ const config = await (0, import_core11.loadConfig)();
 if (!config?.access_token) {
 console.error(`[Daemon] EP833: No access token for tunnel restart`);
 return;
 }
 const apiUrl = config.api_url || "https://episoda.dev";
- const
- if (
- console.log(`[Daemon]
-
-
-
- if (moduleResponse.ok) {
- const moduleData = await moduleResponse.json();
- projectId = moduleData.moduleRecord?.project_id ?? null;
- }
- } catch (e) {
- console.warn(`[Daemon] EP833: Failed to fetch module details for project lookup`);
- }
- const worktree = await getWorktreeInfoForModule(moduleUid);
- if (!worktree) {
- console.error(`[Daemon] EP973: Cannot resolve worktree path for ${moduleUid} - missing config slugs`);
- return;
- }
- if (!worktree.exists) {
- console.error(`[Daemon] EP973: Worktree not found at ${worktree.path}`);
- return;
- }
- const { isPortInUse: isPortInUse2 } = await Promise.resolve().then(() => (init_port_check(), port_check_exports));
- if (await isPortInUse2(port)) {
- console.log(`[Daemon] EP932: Port ${port} in use, checking health...`);
- const healthy = await isDevServerHealthy(port);
- if (!healthy) {
- console.log(`[Daemon] EP932: Dev server on port ${port} is not responding, killing process...`);
- await killProcessOnPort(port);
- }
- }
- const devServerScript = config.project_settings?.worktree_dev_server_script;
- const startResult2 = await ensureDevServer(worktree.path, port, moduleUid, devServerScript);
- if (!startResult2.success) {
- console.error(`[Daemon] EP932: Failed to start dev server: ${startResult2.error}`);
- return;
- }
- }
- console.log(`[Daemon] EP932: Dev server ready, restarting tunnel for ${moduleUid}...`);
- const startResult = await tunnelManager.startTunnel({
- moduleUid,
- port,
- onUrl: async (url) => {
- console.log(`[Daemon] EP833: Tunnel restarted for ${moduleUid}: ${url}`);
+ const status = previewManager.getStatus(moduleUid);
+ if (status) {
+ console.log(`[Daemon] EP1024: Restarting tracked preview for ${moduleUid}...`);
+ const result2 = await previewManager.restartPreview(moduleUid);
+ if (result2.success && result2.previewUrl) {
+ console.log(`[Daemon] EP833: Preview restarted for ${moduleUid}: ${result2.previewUrl}`);
 try {
 await fetchWithAuth(`${apiUrl}/api/modules/${moduleUid}/tunnel`, {
 method: "POST",
 body: JSON.stringify({
- tunnel_url:
+ tunnel_url: result2.previewUrl,
 tunnel_error: null,
 restart_reason: "health_check_failure"
- // EP1003: Server can track restart causes
 })
 });
 } catch (e) {
 console.warn(`[Daemon] EP833: Failed to report restarted tunnel URL`);
 }
+ } else {
+ console.error(`[Daemon] EP833: Preview restart failed for ${moduleUid}: ${result2.error}`);
 }
+ return;
+ }
+ console.log(`[Daemon] EP1024: No tracked preview for ${moduleUid}, starting fresh...`);
+ const worktree = await getWorktreeInfoForModule(moduleUid);
+ if (!worktree) {
+ console.error(`[Daemon] EP1024: Cannot resolve worktree path for ${moduleUid} - missing config slugs`);
+ return;
+ }
+ if (!worktree.exists) {
+ console.error(`[Daemon] EP1024: Worktree not found at ${worktree.path}`);
+ return;
+ }
+ const devServerScript = config.project_settings?.worktree_dev_server_script;
+ const result = await previewManager.startPreview({
+ moduleUid,
+ worktreePath: worktree.path,
+ port,
+ customCommand: devServerScript
 });
- if (
- console.log(`[Daemon] EP833:
+ if (result.success && result.previewUrl) {
+ console.log(`[Daemon] EP833: Preview started for ${moduleUid}: ${result.previewUrl}`);
+ try {
+ await fetchWithAuth(`${apiUrl}/api/modules/${moduleUid}/tunnel`, {
+ method: "POST",
+ body: JSON.stringify({
+ tunnel_url: result.previewUrl,
+ tunnel_error: null,
+ restart_reason: "health_check_failure"
+ })
+ });
+ } catch (e) {
+ console.warn(`[Daemon] EP833: Failed to report restarted tunnel URL`);
+ }
 } else {
- console.error(`[Daemon] EP833:
+ console.error(`[Daemon] EP833: Preview start failed for ${moduleUid}: ${result.error}`);
 }
 } catch (error) {
- console.error(`[Daemon] EP833: Error restarting
+ console.error(`[Daemon] EP833: Error restarting preview for ${moduleUid}:`, error);
 }
 }
 /**
@@ -8864,8 +9540,8 @@ var Daemon = class _Daemon {
 await this.shutdown();
 try {
 const pidPath = getPidFilePath();
- if (
-
+ if (fs17.existsSync(pidPath)) {
+ fs17.unlinkSync(pidPath);
 console.log("[Daemon] PID file cleaned up");
 }
 } catch (error) {
|