episoda 0.2.32 → 0.2.34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/daemon/daemon-process.js +346 -178
- package/dist/daemon/daemon-process.js.map +1 -1
- package/dist/index.js +671 -68
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
@@ -1556,15 +1556,15 @@ var require_git_executor = __commonJS({
  try {
  const { stdout: gitDir } = await execAsync2("git rev-parse --git-dir", { cwd, timeout: 5e3 });
  const gitDirPath = gitDir.trim();
- const
+ const fs17 = await Promise.resolve().then(() => __importStar(require("fs"))).then((m) => m.promises);
  const rebaseMergePath = `${gitDirPath}/rebase-merge`;
  const rebaseApplyPath = `${gitDirPath}/rebase-apply`;
  try {
- await
+ await fs17.access(rebaseMergePath);
  inRebase = true;
  } catch {
  try {
- await
+ await fs17.access(rebaseApplyPath);
  inRebase = true;
  } catch {
  inRebase = false;
@@ -1618,9 +1618,9 @@ var require_git_executor = __commonJS({
  error: validation.error || "UNKNOWN_ERROR"
  };
  }
- const
+ const fs17 = await Promise.resolve().then(() => __importStar(require("fs"))).then((m) => m.promises);
  try {
- await
+ await fs17.access(command.path);
  return {
  success: false,
  error: "WORKTREE_EXISTS",
@@ -1635,6 +1635,9 @@ var require_git_executor = __commonJS({
  const args = ["worktree", "add"];
  if (command.create) {
  args.push("-b", command.branch, command.path);
+ if (command.startPoint) {
+ args.push(command.startPoint);
+ }
  } else {
  args.push(command.path, command.branch);
  }
@@ -1671,9 +1674,9 @@ var require_git_executor = __commonJS({
  */
  async executeWorktreeRemove(command, cwd, options) {
  try {
- const
+ const fs17 = await Promise.resolve().then(() => __importStar(require("fs"))).then((m) => m.promises);
  try {
- await
+ await fs17.access(command.path);
  } catch {
  return {
  success: false,
@@ -1826,10 +1829,10 @@ var require_git_executor = __commonJS({
  */
  async executeCloneBare(command, options) {
  try {
- const
- const
+ const fs17 = await Promise.resolve().then(() => __importStar(require("fs"))).then((m) => m.promises);
+ const path18 = await Promise.resolve().then(() => __importStar(require("path")));
  try {
- await
+ await fs17.access(command.path);
  return {
  success: false,
  error: "BRANCH_ALREADY_EXISTS",
@@ -1838,9 +1841,9 @@ var require_git_executor = __commonJS({
  };
  } catch {
  }
- const parentDir =
+ const parentDir = path18.dirname(command.path);
  try {
- await
+ await fs17.mkdir(parentDir, { recursive: true });
  } catch {
  }
  const { stdout, stderr } = await execAsync2(
@@ -1883,22 +1886,22 @@ var require_git_executor = __commonJS({
  */
  async executeProjectInfo(cwd, options) {
  try {
- const
- const
+ const fs17 = await Promise.resolve().then(() => __importStar(require("fs"))).then((m) => m.promises);
+ const path18 = await Promise.resolve().then(() => __importStar(require("path")));
  let currentPath = cwd;
  let projectPath = cwd;
  let bareRepoPath;
  for (let i = 0; i < 10; i++) {
- const bareDir =
- const episodaDir =
+ const bareDir = path18.join(currentPath, ".bare");
+ const episodaDir = path18.join(currentPath, ".episoda");
  try {
- await
- await
+ await fs17.access(bareDir);
+ await fs17.access(episodaDir);
  projectPath = currentPath;
  bareRepoPath = bareDir;
  break;
  } catch {
- const parentPath =
+ const parentPath = path18.dirname(currentPath);
  if (parentPath === currentPath) {
  break;
  }
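A note on the worktree_add hunk above (@@ -1635,6 +1635,9 @@): when command.create and command.startPoint are both set, the executor now appends the start point after the path, so the underlying invocation becomes git worktree add -b <branch> <path> <start-point>. A minimal TypeScript sketch of that argument order, written as a standalone helper for illustration only (buildWorktreeAddArgs is not an export of this package):

    // Mirrors the argument construction in executeWorktreeAdd above.
    function buildWorktreeAddArgs(cmd: { branch: string; path: string; create?: boolean; startPoint?: string }): string[] {
      const args = ["worktree", "add"];
      if (cmd.create) {
        args.push("-b", cmd.branch, cmd.path);
        if (cmd.startPoint) args.push(cmd.startPoint); // e.g. "origin/main"
      } else {
        args.push(cmd.path, cmd.branch);
      }
      return args;
    }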
@@ -2489,34 +2492,34 @@ var require_auth = __commonJS({
  Object.defineProperty(exports2, "__esModule", { value: true });
  exports2.getConfigDir = getConfigDir6;
  exports2.getConfigPath = getConfigPath;
- exports2.loadConfig =
+ exports2.loadConfig = loadConfig6;
  exports2.saveConfig = saveConfig2;
  exports2.validateToken = validateToken;
- var
- var
- var
+ var fs17 = __importStar(require("fs"));
+ var path18 = __importStar(require("path"));
+ var os7 = __importStar(require("os"));
  var child_process_1 = require("child_process");
  var DEFAULT_CONFIG_FILE = "config.json";
  function getConfigDir6() {
- return process.env.EPISODA_CONFIG_DIR ||
+ return process.env.EPISODA_CONFIG_DIR || path18.join(os7.homedir(), ".episoda");
  }
  function getConfigPath(configPath) {
  if (configPath) {
  return configPath;
  }
- return
+ return path18.join(getConfigDir6(), DEFAULT_CONFIG_FILE);
  }
  function ensureConfigDir(configPath) {
- const dir =
- const isNew = !
+ const dir = path18.dirname(configPath);
+ const isNew = !fs17.existsSync(dir);
  if (isNew) {
-
+ fs17.mkdirSync(dir, { recursive: true, mode: 448 });
  }
  if (process.platform === "darwin") {
- const nosyncPath =
- if (isNew || !
+ const nosyncPath = path18.join(dir, ".nosync");
+ if (isNew || !fs17.existsSync(nosyncPath)) {
  try {
-
+ fs17.writeFileSync(nosyncPath, "", { mode: 384 });
  (0, child_process_1.execSync)(`xattr -w com.apple.fileprovider.ignore 1 "${dir}"`, {
  stdio: "ignore",
  timeout: 5e3
@@ -2526,13 +2529,13 @@ var require_auth = __commonJS({
  }
  }
  }
- async function
+ async function loadConfig6(configPath) {
  const fullPath = getConfigPath(configPath);
- if (!
+ if (!fs17.existsSync(fullPath)) {
  return null;
  }
  try {
- const content =
+ const content = fs17.readFileSync(fullPath, "utf8");
  const config = JSON.parse(content);
  return config;
  } catch (error) {
@@ -2545,7 +2548,7 @@ var require_auth = __commonJS({
  ensureConfigDir(fullPath);
  try {
  const content = JSON.stringify(config, null, 2);
-
+ fs17.writeFileSync(fullPath, content, { mode: 384 });
  } catch (error) {
  throw new Error(`Failed to save config: ${error instanceof Error ? error.message : String(error)}`);
  }
@@ -2696,7 +2699,7 @@ var require_package = __commonJS({
  "package.json"(exports2, module2) {
  module2.exports = {
  name: "episoda",
- version: "0.2.
+ version: "0.2.33",
  description: "CLI tool for Episoda local development workflow orchestration",
  main: "dist/index.js",
  types: "dist/index.d.ts",
@@ -5029,9 +5032,162 @@ var AgentManager = class {
  var import_child_process9 = require("child_process");
  init_port_check();
  var import_core7 = __toESM(require_dist());
-
+
+ // src/utils/env-cache.ts
+ var fs10 = __toESM(require("fs"));
+ var path11 = __toESM(require("path"));
+ var os4 = __toESM(require("os"));
+
+ // src/utils/env-setup.ts
  var fs9 = __toESM(require("fs"));
  var path10 = __toESM(require("path"));
+ async function fetchEnvVars(apiUrl, accessToken) {
+ try {
+ const url = `${apiUrl}/api/cli/env-vars`;
+ const response = await fetch(url, {
+ method: "GET",
+ headers: {
+ "Authorization": `Bearer ${accessToken}`,
+ "Content-Type": "application/json"
+ }
+ });
+ if (!response.ok) {
+ console.warn(`[env-setup] Failed to fetch env vars: ${response.status}`);
+ return {};
+ }
+ const data = await response.json();
+ const envVars = data.env_vars || {};
+ console.log(`[env-setup] Fetched ${Object.keys(envVars).length} env vars from server`);
+ return envVars;
+ } catch (error) {
+ console.warn("[env-setup] Error fetching env vars:", error instanceof Error ? error.message : error);
+ return {};
+ }
+ }
+ function writeEnvFile(targetPath, envVars) {
+ if (Object.keys(envVars).length === 0) {
+ return;
+ }
+ const envContent = Object.entries(envVars).map(([key, value]) => {
+ if (/[\s'"#$`\\]/.test(value) || value.includes("\n")) {
+ const escaped = value.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/\n/g, "\\n");
+ return `${key}="${escaped}"`;
+ }
+ return `${key}=${value}`;
+ }).join("\n") + "\n";
+ const envPath = path10.join(targetPath, ".env");
+ fs9.writeFileSync(envPath, envContent, { mode: 384 });
+ console.log(`[env-setup] Wrote ${Object.keys(envVars).length} env vars to ${envPath}`);
+ }
+
+ // src/utils/env-cache.ts
+ var DEFAULT_CACHE_TTL = 60;
+ var CACHE_DIR = path11.join(os4.homedir(), ".episoda", "cache");
+ function getCacheFilePath(projectId) {
+ return path11.join(CACHE_DIR, `env-vars-${projectId}.json`);
+ }
+ function ensureCacheDir() {
+ if (!fs10.existsSync(CACHE_DIR)) {
+ fs10.mkdirSync(CACHE_DIR, { recursive: true, mode: 448 });
+ }
+ }
+ function readCache(projectId) {
+ try {
+ const cacheFile = getCacheFilePath(projectId);
+ if (!fs10.existsSync(cacheFile)) {
+ return null;
+ }
+ const content = fs10.readFileSync(cacheFile, "utf-8");
+ const data = JSON.parse(content);
+ if (!data.vars || typeof data.vars !== "object" || !data.fetchedAt) {
+ return null;
+ }
+ return data;
+ } catch {
+ return null;
+ }
+ }
+ function writeCache(projectId, vars) {
+ try {
+ ensureCacheDir();
+ const cacheFile = getCacheFilePath(projectId);
+ const data = {
+ vars,
+ fetchedAt: Date.now(),
+ projectId
+ };
+ fs10.writeFileSync(cacheFile, JSON.stringify(data, null, 2), { mode: 384 });
+ } catch (error) {
+ console.warn("[env-cache] Failed to write cache:", error instanceof Error ? error.message : error);
+ }
+ }
+ function isCacheValid(cache, ttlSeconds) {
+ const ageMs = Date.now() - cache.fetchedAt;
+ return ageMs < ttlSeconds * 1e3;
+ }
+ async function fetchEnvVarsWithCache(apiUrl, accessToken, options = {}) {
+ const {
+ noCache = false,
+ cacheTtl = DEFAULT_CACHE_TTL,
+ offline = false,
+ projectId = "default"
+ } = options;
+ if (offline) {
+ const cache = readCache(projectId);
+ if (cache && Object.keys(cache.vars).length > 0) {
+ return {
+ envVars: cache.vars,
+ fromCache: true,
+ cacheAge: Date.now() - cache.fetchedAt
+ };
+ }
+ throw new Error(
+ "Offline mode requires cached env vars, but no cache found.\nRun without --offline first to populate the cache."
+ );
+ }
+ if (!noCache) {
+ const cache = readCache(projectId);
+ if (cache && isCacheValid(cache, cacheTtl)) {
+ return {
+ envVars: cache.vars,
+ fromCache: true,
+ cacheAge: Date.now() - cache.fetchedAt
+ };
+ }
+ }
+ try {
+ const envVars = await fetchEnvVars(apiUrl, accessToken);
+ if (Object.keys(envVars).length > 0) {
+ writeCache(projectId, envVars);
+ }
+ return {
+ envVars,
+ fromCache: false
+ };
+ } catch (error) {
+ const cache = readCache(projectId);
+ if (cache && Object.keys(cache.vars).length > 0) {
+ const cacheAge = Date.now() - cache.fetchedAt;
+ console.warn(
+ `[env-cache] Failed to fetch env vars, using stale cache (${Math.round(cacheAge / 1e3)}s old)`
+ );
+ return {
+ envVars: cache.vars,
+ fromCache: true,
+ cacheAge
+ };
+ }
+ throw new Error(
+ `Failed to fetch environment variables: ${error instanceof Error ? error.message : error}
+ No cached values available as fallback.`
+ );
+ }
+ }
+
+ // src/utils/dev-server.ts
+ var import_http = __toESM(require("http"));
+ var fs11 = __toESM(require("fs"));
+ var path12 = __toESM(require("path"));
  var MAX_RESTART_ATTEMPTS = 5;
  var INITIAL_RESTART_DELAY_MS = 2e3;
  var MAX_RESTART_DELAY_MS = 3e4;
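The env-cache module added above resolves variables in this order: in offline mode a non-empty cache is required; otherwise a cache entry younger than cacheTtl seconds is reused; otherwise the server is queried and the result cached, with a stale cache used only as a fallback when the fetch fails. A rough TypeScript usage sketch of fetchEnvVarsWithCache as it is called later in this diff; the token source and project id below are placeholders, not values from the package:

    // Illustrative only: mirrors the call made in startDevServer further below.
    const accessToken = process.env.EPISODA_TOKEN ?? "";       // placeholder token source
    const { envVars, fromCache, cacheAge } = await fetchEnvVarsWithCache(
      "https://episoda.dev",                                   // same default apiUrl the daemon falls back to
      accessToken,
      { projectId: "my-project", cacheTtl: 300 }               // placeholder projectId; 5-minute TTL
    );
    console.log(fromCache ? `cache hit, ${cacheAge} ms old` : "fetched from server");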
@@ -5039,26 +5195,26 @@ var MAX_LOG_SIZE_BYTES = 5 * 1024 * 1024;
  var NODE_MEMORY_LIMIT_MB = 2048;
  var activeServers = /* @__PURE__ */ new Map();
  function getLogsDir() {
- const logsDir =
- if (!
-
+ const logsDir = path12.join((0, import_core7.getConfigDir)(), "logs");
+ if (!fs11.existsSync(logsDir)) {
+ fs11.mkdirSync(logsDir, { recursive: true });
  }
  return logsDir;
  }
  function getLogFilePath(moduleUid) {
- return
+ return path12.join(getLogsDir(), `dev-${moduleUid}.log`);
  }
  function rotateLogIfNeeded(logPath) {
  try {
- if (
- const stats =
+ if (fs11.existsSync(logPath)) {
+ const stats = fs11.statSync(logPath);
  if (stats.size > MAX_LOG_SIZE_BYTES) {
  const backupPath = `${logPath}.1`;
- if (
-
+ if (fs11.existsSync(backupPath)) {
+ fs11.unlinkSync(backupPath);
  }
-
- console.log(`[DevServer] EP932: Rotated log file for ${
+ fs11.renameSync(logPath, backupPath);
+ console.log(`[DevServer] EP932: Rotated log file for ${path12.basename(logPath)}`);
  }
  }
  } catch (error) {
@@ -5071,7 +5227,7 @@ function writeToLog(logPath, line, isError = false) {
  const prefix = isError ? "ERR" : "OUT";
  const logLine = `[${timestamp}] [${prefix}] ${line}
  `;
-
+ fs11.appendFileSync(logPath, logLine);
  } catch {
  }
  }
@@ -5152,7 +5308,7 @@ function calculateRestartDelay(restartCount) {
  const delay = INITIAL_RESTART_DELAY_MS * Math.pow(2, restartCount);
  return Math.min(delay, MAX_RESTART_DELAY_MS);
  }
- function spawnDevServerProcess(projectPath, port, moduleUid, logPath, customCommand) {
+ function spawnDevServerProcess(projectPath, port, moduleUid, logPath, customCommand, injectedEnvVars) {
  rotateLogIfNeeded(logPath);
  const nodeOptions = process.env.NODE_OPTIONS || "";
  const memoryFlag = `--max-old-space-size=${NODE_MEMORY_LIMIT_MB}`;
@@ -5160,13 +5316,19 @@ function spawnDevServerProcess(projectPath, port, moduleUid, logPath, customComm
  const command = customCommand || "npm run dev";
  const [cmd, ...args] = command.split(" ");
  console.log(`[DevServer] EP959: Starting with command: ${command}`);
+ const mergedEnv = {
+ ...process.env,
+ ...injectedEnvVars,
+ PORT: String(port),
+ NODE_OPTIONS: enhancedNodeOptions
+ };
+ const injectedCount = injectedEnvVars ? Object.keys(injectedEnvVars).length : 0;
+ if (injectedCount > 0) {
+ console.log(`[DevServer] EP998: Injecting ${injectedCount} env vars from database`);
+ }
  const devProcess = (0, import_child_process9.spawn)(cmd, args, {
  cwd: projectPath,
- env:
- ...process.env,
- PORT: String(port),
- NODE_OPTIONS: enhancedNodeOptions
- },
+ env: mergedEnv,
  stdio: ["ignore", "pipe", "pipe"],
  detached: false,
  shell: true
@@ -5216,7 +5378,7 @@ async function handleProcessExit(moduleUid, code, signal) {
  return;
  }
  const logPath = serverInfo.logFile || getLogFilePath(moduleUid);
- const newProcess = spawnDevServerProcess(serverInfo.projectPath, serverInfo.port, moduleUid, logPath, serverInfo.customCommand);
+ const newProcess = spawnDevServerProcess(serverInfo.projectPath, serverInfo.port, moduleUid, logPath, serverInfo.customCommand, serverInfo.injectedEnvVars);
  const updatedInfo = {
  ...serverInfo,
  process: newProcess,
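On the spawnDevServerProcess change above: mergedEnv is built with object spread, so later keys win. Injected database values override anything inherited from process.env, and the daemon-controlled PORT and NODE_OPTIONS override both. A tiny TypeScript sketch with made-up values:

    // Later spread entries win: injected values beat process.env, and the explicit PORT beats an injected PORT.
    const injected = { API_KEY: "from-db", PORT: "5000" };     // hypothetical injected vars
    const mergedEnv = { ...process.env, ...injected, PORT: String(3000) };
    // mergedEnv.API_KEY === "from-db" and mergedEnv.PORT === "3000"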
@@ -5256,9 +5418,27 @@ async function startDevServer(projectPath, port = 3e3, moduleUid = "default", op
  }
  }
  console.log(`[DevServer] EP932: Starting dev server for ${moduleUid} on port ${port} (auto-restart: ${autoRestart})...`);
+ let injectedEnvVars = {};
+ try {
+ const config = await (0, import_core7.loadConfig)();
+ if (config?.access_token && config?.project_id) {
+ const apiUrl = config.api_url || "https://episoda.dev";
+ const result = await fetchEnvVarsWithCache(apiUrl, config.access_token, {
+ projectId: config.project_id,
+ cacheTtl: 300
+ // 5 minute cache for daemon
+ });
+ injectedEnvVars = result.envVars;
+ console.log(`[DevServer] EP998: Loaded ${Object.keys(injectedEnvVars).length} env vars (from ${result.fromCache ? "cache" : "server"})`);
+ } else {
+ console.log(`[DevServer] EP998: No auth config, skipping env var injection`);
+ }
+ } catch (error) {
+ console.warn(`[DevServer] EP998: Failed to fetch env vars:`, error instanceof Error ? error.message : error);
+ }
  try {
  const logPath = getLogFilePath(moduleUid);
- const devProcess = spawnDevServerProcess(projectPath, port, moduleUid, logPath, customCommand);
+ const devProcess = spawnDevServerProcess(projectPath, port, moduleUid, logPath, customCommand, injectedEnvVars);
  const serverInfo = {
  process: devProcess,
  moduleUid,
@@ -5269,8 +5449,10 @@ async function startDevServer(projectPath, port = 3e3, moduleUid = "default", op
  lastRestartAt: null,
  autoRestartEnabled: autoRestart,
  logFile: logPath,
- customCommand
+ customCommand,
  // EP959-m2: Store for restarts
+ injectedEnvVars
+ // EP998: Store for restarts
  };
  activeServers.set(moduleUid, serverInfo);
  writeToLog(logPath, `Starting dev server on port ${port}`, false);
@@ -5356,8 +5538,8 @@ async function ensureDevServer(projectPath, port = 3e3, moduleUid = "default", c
  }

  // src/utils/port-detect.ts
- var
- var
+ var fs12 = __toESM(require("fs"));
+ var path13 = __toESM(require("path"));
  var DEFAULT_PORT = 3e3;
  function detectDevPort(projectPath) {
  const envPort = getPortFromEnv(projectPath);
@@ -5375,15 +5557,15 @@ function detectDevPort(projectPath) {
  }
  function getPortFromEnv(projectPath) {
  const envPaths = [
-
-
-
-
+ path13.join(projectPath, ".env"),
+ path13.join(projectPath, ".env.local"),
+ path13.join(projectPath, ".env.development"),
+ path13.join(projectPath, ".env.development.local")
  ];
  for (const envPath of envPaths) {
  try {
- if (!
- const content =
+ if (!fs12.existsSync(envPath)) continue;
+ const content = fs12.readFileSync(envPath, "utf-8");
  const lines = content.split("\n");
  for (const line of lines) {
  const match = line.match(/^\s*PORT\s*=\s*["']?(\d+)["']?\s*(?:#.*)?$/);
@@ -5400,10 +5582,10 @@ function getPortFromEnv(projectPath) {
  return null;
  }
  function getPortFromPackageJson(projectPath) {
- const packageJsonPath =
+ const packageJsonPath = path13.join(projectPath, "package.json");
  try {
- if (!
- const content =
+ if (!fs12.existsSync(packageJsonPath)) return null;
+ const content = fs12.readFileSync(packageJsonPath, "utf-8");
  const pkg = JSON.parse(content);
  const devScript = pkg.scripts?.dev;
  if (!devScript) return null;
@@ -5427,8 +5609,8 @@ function getPortFromPackageJson(projectPath) {
  }

  // src/daemon/worktree-manager.ts
- var
- var
+ var fs13 = __toESM(require("fs"));
+ var path14 = __toESM(require("path"));
  var import_core8 = __toESM(require_dist());
  function validateModuleUid(moduleUid) {
  if (!moduleUid || typeof moduleUid !== "string" || !moduleUid.trim()) {
@@ -5452,8 +5634,8 @@ var WorktreeManager = class _WorktreeManager {
  // ============================================================
  this.lockPath = "";
  this.projectRoot = projectRoot;
- this.bareRepoPath =
- this.configPath =
+ this.bareRepoPath = path14.join(projectRoot, ".bare");
+ this.configPath = path14.join(projectRoot, ".episoda", "config.json");
  this.gitExecutor = new import_core8.GitExecutor();
  }
  /**
@@ -5462,10 +5644,10 @@ var WorktreeManager = class _WorktreeManager {
  * @returns true if valid project, false otherwise
  */
  async initialize() {
- if (!
+ if (!fs13.existsSync(this.bareRepoPath)) {
  return false;
  }
- if (!
+ if (!fs13.existsSync(this.configPath)) {
  return false;
  }
  try {
@@ -5480,8 +5662,8 @@ var WorktreeManager = class _WorktreeManager {
  */
  static async createProject(projectRoot, repoUrl, projectId, workspaceSlug, projectSlug) {
  const manager = new _WorktreeManager(projectRoot);
- const episodaDir =
-
+ const episodaDir = path14.join(projectRoot, ".episoda");
+ fs13.mkdirSync(episodaDir, { recursive: true });
  const cloneResult = await manager.gitExecutor.execute({
  action: "clone_bare",
  url: repoUrl,
@@ -5512,7 +5694,7 @@ var WorktreeManager = class _WorktreeManager {
  error: `Invalid module UID: "${moduleUid}" - contains disallowed characters`
  };
  }
- const worktreePath =
+ const worktreePath = path14.join(this.projectRoot, moduleUid);
  const lockAcquired = await this.acquireLock();
  if (!lockAcquired) {
  return {
@@ -5533,14 +5715,19 @@ var WorktreeManager = class _WorktreeManager {
  action: "fetch",
  remote: "origin"
  }, { cwd: this.bareRepoPath });
- if (!fetchResult.success) {
- console.
+ if (!fetchResult.success && createBranch) {
+ console.error("[worktree-manager] Failed to fetch from origin:", fetchResult.output);
+ return {
+ success: false,
+ error: "Failed to fetch latest refs from origin. Cannot create worktree with stale refs."
+ };
  }
  const result = await this.gitExecutor.execute({
  action: "worktree_add",
  path: worktreePath,
  branch: branchName,
- create: createBranch
+ create: createBranch,
+ startPoint: createBranch ? "origin/main" : void 0
  }, { cwd: this.bareRepoPath });
  if (!result.success) {
  return {
@@ -5688,7 +5875,7 @@ var WorktreeManager = class _WorktreeManager {
  let prunedCount = 0;
  await this.updateConfigSafe((config) => {
  const initialCount = config.worktrees.length;
- config.worktrees = config.worktrees.filter((w) =>
+ config.worktrees = config.worktrees.filter((w) => fs13.existsSync(w.worktreePath));
  prunedCount = initialCount - config.worktrees.length;
  return config;
  });
@@ -5769,16 +5956,16 @@ var WorktreeManager = class _WorktreeManager {
  const retryInterval = 50;
  while (Date.now() - startTime < timeoutMs) {
  try {
-
+ fs13.writeFileSync(lockPath, String(process.pid), { flag: "wx" });
  return true;
  } catch (err) {
  if (err.code === "EEXIST") {
  try {
- const stats =
+ const stats = fs13.statSync(lockPath);
  const lockAge = Date.now() - stats.mtimeMs;
  if (lockAge > 3e4) {
  try {
- const lockContent =
+ const lockContent = fs13.readFileSync(lockPath, "utf-8").trim();
  const lockPid = parseInt(lockContent, 10);
  if (!isNaN(lockPid) && this.isProcessRunning(lockPid)) {
  await new Promise((resolve3) => setTimeout(resolve3, retryInterval));
@@ -5787,7 +5974,7 @@ var WorktreeManager = class _WorktreeManager {
  } catch {
  }
  try {
-
+ fs13.unlinkSync(lockPath);
  } catch {
  }
  continue;
@@ -5808,16 +5995,16 @@ var WorktreeManager = class _WorktreeManager {
  */
  releaseLock() {
  try {
-
+ fs13.unlinkSync(this.getLockPath());
  } catch {
  }
  }
  readConfig() {
  try {
- if (!
+ if (!fs13.existsSync(this.configPath)) {
  return null;
  }
- const content =
+ const content = fs13.readFileSync(this.configPath, "utf-8");
  return JSON.parse(content);
  } catch (error) {
  console.error("[WorktreeManager] Failed to read config:", error);
@@ -5826,11 +6013,11 @@ var WorktreeManager = class _WorktreeManager {
  }
  writeConfig(config) {
  try {
- const dir =
- if (!
-
+ const dir = path14.dirname(this.configPath);
+ if (!fs13.existsSync(dir)) {
+ fs13.mkdirSync(dir, { recursive: true });
  }
-
+ fs13.writeFileSync(this.configPath, JSON.stringify(config, null, 2), "utf-8");
  } catch (error) {
  console.error("[WorktreeManager] Failed to write config:", error);
  throw error;
@@ -5911,14 +6098,14 @@ var WorktreeManager = class _WorktreeManager {
  }
  try {
  for (const file of files) {
- const srcPath =
- const destPath =
- if (
- const destDir =
- if (!
-
- }
-
+ const srcPath = path14.join(mainWorktree.worktreePath, file);
+ const destPath = path14.join(worktree.worktreePath, file);
+ if (fs13.existsSync(srcPath)) {
+ const destDir = path14.dirname(destPath);
+ if (!fs13.existsSync(destDir)) {
+ fs13.mkdirSync(destDir, { recursive: true });
+ }
+ fs13.copyFileSync(srcPath, destPath);
  console.log(`[WorktreeManager] EP964: Copied ${file} to ${moduleUid} (deprecated)`);
  } else {
  console.log(`[WorktreeManager] EP964: Skipped ${file} (not found in main)`);
@@ -6001,27 +6188,27 @@ var WorktreeManager = class _WorktreeManager {
  }
  };
  function getEpisodaRoot() {
- return process.env.EPISODA_ROOT ||
+ return process.env.EPISODA_ROOT || path14.join(require("os").homedir(), "episoda");
  }
  async function isWorktreeProject(projectRoot) {
  const manager = new WorktreeManager(projectRoot);
  return manager.initialize();
  }
  async function findProjectRoot(startPath) {
- let current =
+ let current = path14.resolve(startPath);
  const episodaRoot = getEpisodaRoot();
  if (!current.startsWith(episodaRoot)) {
  return null;
  }
  for (let i = 0; i < 10; i++) {
- const bareDir =
- const episodaDir =
- if (
+ const bareDir = path14.join(current, ".bare");
+ const episodaDir = path14.join(current, ".episoda");
+ if (fs13.existsSync(bareDir) && fs13.existsSync(episodaDir)) {
  if (await isWorktreeProject(current)) {
  return current;
  }
  }
- const parent =
+ const parent = path14.dirname(current);
  if (parent === current) {
  break;
  }
@@ -6030,39 +6217,20 @@ async function findProjectRoot(startPath) {
  return null;
  }

- // src/utils/env-setup.ts
- var fs12 = __toESM(require("fs"));
- var path13 = __toESM(require("path"));
- function writeEnvFile(targetPath, envVars) {
- if (Object.keys(envVars).length === 0) {
- return;
- }
- const envContent = Object.entries(envVars).map(([key, value]) => {
- if (/[\s'"#$`\\]/.test(value) || value.includes("\n")) {
- const escaped = value.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/\n/g, "\\n");
- return `${key}="${escaped}"`;
- }
- return `${key}=${value}`;
- }).join("\n") + "\n";
- const envPath = path13.join(targetPath, ".env");
- fs12.writeFileSync(envPath, envContent, { mode: 384 });
- console.log(`[env-setup] Wrote ${Object.keys(envVars).length} env vars to ${envPath}`);
- }
-
  // src/utils/worktree.ts
- var
- var
- var
+ var path15 = __toESM(require("path"));
+ var fs14 = __toESM(require("fs"));
+ var os5 = __toESM(require("os"));
  var import_core9 = __toESM(require_dist());
  function getEpisodaRoot2() {
- return process.env.EPISODA_ROOT ||
+ return process.env.EPISODA_ROOT || path15.join(os5.homedir(), "episoda");
  }
  function getWorktreeInfo(moduleUid, workspaceSlug, projectSlug) {
  const root = getEpisodaRoot2();
- const worktreePath =
+ const worktreePath = path15.join(root, workspaceSlug, projectSlug, moduleUid);
  return {
  path: worktreePath,
- exists:
+ exists: fs14.existsSync(worktreePath),
  moduleUid
  };
  }
@@ -6079,7 +6247,7 @@ async function getProjectRootPath() {
  if (!config?.workspace_slug || !config?.project_slug) {
  return null;
  }
- return
+ return path15.join(
  getEpisodaRoot2(),
  config.workspace_slug,
  config.project_slug
@@ -6129,61 +6297,61 @@ function clearAllPorts() {
  }

  // src/framework-detector.ts
- var
- var
+ var fs15 = __toESM(require("fs"));
+ var path16 = __toESM(require("path"));
  function getInstallCommand(cwd) {
- if (
+ if (fs15.existsSync(path16.join(cwd, "bun.lockb"))) {
  return {
  command: ["bun", "install"],
  description: "Installing dependencies with bun",
  detectedFrom: "bun.lockb"
  };
  }
- if (
+ if (fs15.existsSync(path16.join(cwd, "pnpm-lock.yaml"))) {
  return {
  command: ["pnpm", "install"],
  description: "Installing dependencies with pnpm",
  detectedFrom: "pnpm-lock.yaml"
  };
  }
- if (
+ if (fs15.existsSync(path16.join(cwd, "yarn.lock"))) {
  return {
  command: ["yarn", "install"],
  description: "Installing dependencies with yarn",
  detectedFrom: "yarn.lock"
  };
  }
- if (
+ if (fs15.existsSync(path16.join(cwd, "package-lock.json"))) {
  return {
  command: ["npm", "ci"],
  description: "Installing dependencies with npm ci",
  detectedFrom: "package-lock.json"
  };
  }
- if (
+ if (fs15.existsSync(path16.join(cwd, "package.json"))) {
  return {
  command: ["npm", "install"],
  description: "Installing dependencies with npm",
  detectedFrom: "package.json"
  };
  }
- if (
+ if (fs15.existsSync(path16.join(cwd, "Pipfile.lock")) || fs15.existsSync(path16.join(cwd, "Pipfile"))) {
  return {
  command: ["pipenv", "install"],
  description: "Installing dependencies with pipenv",
- detectedFrom:
+ detectedFrom: fs15.existsSync(path16.join(cwd, "Pipfile.lock")) ? "Pipfile.lock" : "Pipfile"
  };
  }
- if (
+ if (fs15.existsSync(path16.join(cwd, "poetry.lock"))) {
  return {
  command: ["poetry", "install"],
  description: "Installing dependencies with poetry",
  detectedFrom: "poetry.lock"
  };
  }
- if (
- const pyprojectPath =
- const content =
+ if (fs15.existsSync(path16.join(cwd, "pyproject.toml"))) {
+ const pyprojectPath = path16.join(cwd, "pyproject.toml");
+ const content = fs15.readFileSync(pyprojectPath, "utf-8");
  if (content.includes("[tool.poetry]")) {
  return {
  command: ["poetry", "install"],
@@ -6192,41 +6360,41 @@ function getInstallCommand(cwd) {
  };
  }
  }
- if (
+ if (fs15.existsSync(path16.join(cwd, "requirements.txt"))) {
  return {
  command: ["pip", "install", "-r", "requirements.txt"],
  description: "Installing dependencies with pip",
  detectedFrom: "requirements.txt"
  };
  }
- if (
+ if (fs15.existsSync(path16.join(cwd, "Gemfile.lock")) || fs15.existsSync(path16.join(cwd, "Gemfile"))) {
  return {
  command: ["bundle", "install"],
  description: "Installing dependencies with bundler",
- detectedFrom:
+ detectedFrom: fs15.existsSync(path16.join(cwd, "Gemfile.lock")) ? "Gemfile.lock" : "Gemfile"
  };
  }
- if (
+ if (fs15.existsSync(path16.join(cwd, "go.sum")) || fs15.existsSync(path16.join(cwd, "go.mod"))) {
  return {
  command: ["go", "mod", "download"],
  description: "Downloading Go modules",
- detectedFrom:
+ detectedFrom: fs15.existsSync(path16.join(cwd, "go.sum")) ? "go.sum" : "go.mod"
  };
  }
- if (
+ if (fs15.existsSync(path16.join(cwd, "Cargo.lock")) || fs15.existsSync(path16.join(cwd, "Cargo.toml"))) {
  return {
  command: ["cargo", "build"],
  description: "Building Rust project (downloads dependencies)",
- detectedFrom:
+ detectedFrom: fs15.existsSync(path16.join(cwd, "Cargo.lock")) ? "Cargo.lock" : "Cargo.toml"
  };
  }
  return null;
  }

  // src/daemon/daemon-process.ts
- var
- var
- var
+ var fs16 = __toESM(require("fs"));
+ var os6 = __toESM(require("os"));
+ var path17 = __toESM(require("path"));
  var packageJson = require_package();
  async function ensureValidToken(config, bufferMs = 5 * 60 * 1e3) {
  const now = Date.now();
@@ -6295,7 +6463,7 @@ async function fetchWithAuth(url, options = {}, retryOnUnauthorized = true) {
  }
  return response;
  }
- async function
+ async function fetchEnvVars2() {
  try {
  const config = await (0, import_core10.loadConfig)();
  if (!config?.project_id) {
@@ -6438,9 +6606,9 @@ var Daemon = class _Daemon {
  machineId: this.machineId,
  deviceId: this.deviceId,
  // EP726: UUID for unified device identification
- hostname:
- platform:
- arch:
+ hostname: os6.hostname(),
+ platform: os6.platform(),
+ arch: os6.arch(),
  projects
  };
  });
@@ -6723,7 +6891,7 @@ var Daemon = class _Daemon {
  client.updateActivity();
  try {
  const gitCmd = message.command;
- const bareRepoPath =
+ const bareRepoPath = path17.join(projectPath, ".bare");
  const cwd = gitCmd.worktreePath || bareRepoPath;
  if (gitCmd.worktreePath) {
  console.log(`[Daemon] Routing command to worktree: ${gitCmd.worktreePath}`);
@@ -7243,7 +7411,7 @@ var Daemon = class _Daemon {
  }
  const worktreeConfig = await (0, import_core10.loadConfig)();
  const setupConfig = worktreeConfig?.project_settings;
- const envVars = await
+ const envVars = await fetchEnvVars2();
  const hasEnvVars = Object.keys(envVars).length > 0;
  const hasSetupConfig = setupConfig?.worktree_copy_files?.length || setupConfig?.worktree_setup_script || hasEnvVars;
  {
@@ -7357,8 +7525,8 @@ var Daemon = class _Daemon {
  let daemonPid;
  try {
  const pidPath = getPidFilePath();
- if (
- const pidStr =
+ if (fs16.existsSync(pidPath)) {
+ const pidStr = fs16.readFileSync(pidPath, "utf-8").trim();
  daemonPid = parseInt(pidStr, 10);
  }
  } catch (pidError) {
@@ -7382,9 +7550,9 @@ var Daemon = class _Daemon {
  client.once("auth_error", errorHandler);
  });
  await client.connect(wsUrl, config.access_token, this.machineId, {
- hostname:
- osPlatform:
- osArch:
+ hostname: os6.hostname(),
+ osPlatform: os6.platform(),
+ osArch: os6.arch(),
  daemonPid
  });
  console.log(`[Daemon] Successfully connected to project ${projectId}`);
@@ -7479,27 +7647,27 @@ var Daemon = class _Daemon {
  */
  async installGitHooks(projectPath) {
  const hooks = ["post-checkout", "pre-commit", "post-commit"];
- const hooksDir =
- if (!
+ const hooksDir = path17.join(projectPath, ".git", "hooks");
+ if (!fs16.existsSync(hooksDir)) {
  console.warn(`[Daemon] Hooks directory not found: ${hooksDir}`);
  return;
  }
  for (const hookName of hooks) {
  try {
- const hookPath =
- const bundledHookPath =
- if (!
+ const hookPath = path17.join(hooksDir, hookName);
+ const bundledHookPath = path17.join(__dirname, "..", "hooks", hookName);
+ if (!fs16.existsSync(bundledHookPath)) {
  console.warn(`[Daemon] Bundled hook not found: ${bundledHookPath}`);
  continue;
  }
- const hookContent =
- if (
- const existingContent =
+ const hookContent = fs16.readFileSync(bundledHookPath, "utf-8");
+ if (fs16.existsSync(hookPath)) {
+ const existingContent = fs16.readFileSync(hookPath, "utf-8");
  if (existingContent === hookContent) {
  continue;
  }
  }
-
+ fs16.writeFileSync(hookPath, hookContent, { mode: 493 });
  console.log(`[Daemon] Installed git hook: ${hookName}`);
  } catch (error) {
  console.warn(`[Daemon] Failed to install ${hookName} hook:`, error instanceof Error ? error.message : error);
@@ -7731,7 +7899,7 @@ var Daemon = class _Daemon {
  continue;
  }
  const setupConfig = config.project_settings;
- const envVars = await
+ const envVars = await fetchEnvVars2();
  console.log(`[Daemon] EP995: Starting setup for reconciled module ${moduleUid}`);
  await worktreeManager.updateWorktreeStatus(moduleUid, "pending");
  await this.updateModuleWorktreeStatus(moduleUid, "pending", newWorktree.path);
@@ -8615,8 +8783,8 @@ var Daemon = class _Daemon {
  await this.shutdown();
  try {
  const pidPath = getPidFilePath();
- if (
-
+ if (fs16.existsSync(pidPath)) {
+ fs16.unlinkSync(pidPath);
  console.log("[Daemon] PID file cleaned up");
  }
  } catch (error) {