xcode-build-queue 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +266 -0
- package/dist/backends/mcp.d.ts +5 -0
- package/dist/backends/mcp.js +224 -0
- package/dist/backends/xcodebuild.d.ts +5 -0
- package/dist/backends/xcodebuild.js +83 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +244 -0
- package/dist/config.d.ts +15 -0
- package/dist/config.js +114 -0
- package/dist/daemon.d.ts +8 -0
- package/dist/daemon.js +186 -0
- package/dist/enqueue.d.ts +14 -0
- package/dist/enqueue.js +133 -0
- package/dist/executor.d.ts +5 -0
- package/dist/executor.js +134 -0
- package/dist/setup-claude.d.ts +10 -0
- package/dist/setup-claude.js +100 -0
- package/dist/snapshot.d.ts +23 -0
- package/dist/snapshot.js +99 -0
- package/dist/types.d.ts +33 -0
- package/dist/types.js +13 -0
- package/dist/utils.d.ts +41 -0
- package/dist/utils.js +118 -0
- package/dist/worktree.d.ts +17 -0
- package/dist/worktree.js +239 -0
- package/package.json +43 -0
package/dist/cli.js
ADDED
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
"use strict";
|
|
3
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
4
|
+
const commander_1 = require("commander");
|
|
5
|
+
const config_js_1 = require("./config.js");
|
|
6
|
+
const daemon_js_1 = require("./daemon.js");
|
|
7
|
+
const enqueue_js_1 = require("./enqueue.js");
|
|
8
|
+
const worktree_js_1 = require("./worktree.js");
|
|
9
|
+
const setup_claude_js_1 = require("./setup-claude.js");
|
|
10
|
+
const node_fs_1 = require("node:fs");
|
|
11
|
+
const node_path_1 = require("node:path");
|
|
12
|
+
const utils_js_1 = require("./utils.js");
|
|
13
|
+
// Build the `xbq` command tree. Subcommands delegate to the library modules;
// this file only does argument parsing and result presentation.
const program = new commander_1.Command();
program
    .name("xbq")
    .description("Serial build queue for Xcode projects with git worktrees")
    .version("0.1.0");
// --- init ---
program
    .command("init [repo-path]")
    .description("Initialize xbq with your main Xcode repo path")
    .action(async (repoPath) => {
        await (0, config_js_1.initConfig)(repoPath);
    });
// --- build ---
program
    .command("build")
    .description("Enqueue a build job")
    .option("-b, --branch <branch>", "Branch to build (optional in worktree)")
    .option("-s, --scheme <scheme>", "Xcode scheme")
    .option("-d, --destination <dest>", "Simulator destination (e.g. 'platform=iOS Simulator,name=iPhone 16,OS=18.0')")
    .option("--backend <backend>", "Backend: mcp or xcodebuild")
    .option("--timeout <seconds>", "Timeout in seconds", "1800")
    .action(async (opts) => {
        const result = await (0, enqueue_js_1.enqueueAndWait)({
            action: "build",
            branch: opts.branch,
            scheme: opts.scheme,
            destination: opts.destination,
            backend: opts.backend,
            // Explicit radix so the base can never be inferred from the input.
            timeout: parseInt(opts.timeout, 10),
        });
        printResult(result);
        // Exit status mirrors the job outcome so scripts/CI can chain on it.
        process.exit(result.status === "passed" ? 0 : 1);
    });
// --- test ---
program
    .command("test")
    .description("Enqueue a test job")
    .option("-b, --branch <branch>", "Branch to test (optional in worktree)")
    .option("-s, --scheme <scheme>", "Xcode scheme")
    .option("-t, --test-plan <plan>", "Test plan name")
    .option("-d, --destination <dest>", "Simulator destination")
    .option("--backend <backend>", "Backend: mcp or xcodebuild")
    .option("--timeout <seconds>", "Timeout in seconds", "1800")
    .action(async (opts) => {
        const result = await (0, enqueue_js_1.enqueueAndWait)({
            action: "test",
            branch: opts.branch,
            scheme: opts.scheme,
            testPlan: opts.testPlan,
            destination: opts.destination,
            backend: opts.backend,
            timeout: parseInt(opts.timeout, 10),
        });
        printResult(result);
        process.exit(result.status === "passed" ? 0 : 1);
    });
// --- daemon ---
const daemon = program
    .command("daemon")
    .description("Manage the build queue daemon");
daemon
    .command("start")
    .description("Start the daemon")
    .option("-f, --foreground", "Run in foreground")
    .action(async (opts) => {
        // Preserve the two distinct call shapes: an options object is passed
        // only when -f/--foreground was given.
        if (opts.foreground) {
            await (0, daemon_js_1.startDaemon)({ foreground: true });
        }
        else {
            await (0, daemon_js_1.startDaemon)();
        }
    });
daemon
    .command("stop")
    .description("Stop the daemon")
    .action(() => {
        (0, daemon_js_1.stopDaemon)();
    });
daemon
    .command("status")
    .description("Show daemon and queue status")
    .action(() => {
        (0, daemon_js_1.daemonStatus)();
    });
// --- status --- (top-level alias for `xbq daemon status`)
program
    .command("status")
    .description("Show queue status")
    .action(() => {
        (0, daemon_js_1.daemonStatus)();
    });
|
|
104
|
+
// --- logs ---
program
    .command("logs [job-id]")
    .description("View build logs")
    .option("-n, --lines <n>", "Number of lines to show", "50")
    .action((jobId, opts) => {
        if (jobId) {
            // Tail the raw log file for a single job.
            const logPath = (0, node_path_1.join)(utils_js_1.BQ_LOGS_DIR, `${jobId}.log`);
            if (!(0, node_fs_1.existsSync)(logPath)) {
                utils_js_1.log.error(`Log not found: ${logPath}`);
                process.exit(1);
            }
            const content = (0, node_fs_1.readFileSync)(logPath, "utf-8");
            const lines = content.split("\n");
            // Guard against a non-numeric --lines value: parseInt would yield
            // NaN and slice(-NaN) would print the entire file.
            const requested = parseInt(opts?.lines || "50", 10);
            const n = Number.isFinite(requested) ? requested : 50;
            console.log(lines.slice(-n).join("\n"));
        }
        else {
            // No job id: list the ten most recent results, newest first.
            if (!(0, node_fs_1.existsSync)(utils_js_1.BQ_RESULTS_DIR)) {
                utils_js_1.log.info("No results yet");
                return;
            }
            const files = (0, node_fs_1.readdirSync)(utils_js_1.BQ_RESULTS_DIR)
                .filter(f => f.endsWith(".json"))
                .sort() // IDs are timestamp-prefixed, so sorting is chronological
                .reverse()
                .slice(0, 10);
            if (files.length === 0) {
                utils_js_1.log.info("No results yet");
                return;
            }
            console.log("Recent jobs:");
            for (const f of files) {
                const r = JSON.parse((0, node_fs_1.readFileSync)((0, node_path_1.join)(utils_js_1.BQ_RESULTS_DIR, f), "utf-8"));
                const icon = r.status === "passed" ? "\x1b[32m\u2713\x1b[0m" : r.status === "failed" ? "\x1b[31m\u2717\x1b[0m" : "\x1b[33m!\x1b[0m";
                console.log(`  ${icon} ${r.id} ${r.summary} (${r.duration_seconds}s)`);
            }
        }
    });
|
|
144
|
+
// --- worktree ---
const worktree = program
    .command("worktree")
    .description("Manage git worktrees for parallel sessions");
worktree
    .command("new [name]")
    .description("Create a new worktree (and optionally start Claude Code)")
    .option("-c, --claude", "Start Claude Code in the new worktree")
    .action((name, opts) => {
        const path = (0, worktree_js_1.createWorktree)(name, { startClaude: opts?.claude });
        // Print the bare path so it can be captured by shell scripts.
        console.log(path);
    });
worktree
    .command("list")
    .description("List all worktrees")
    .action(() => {
        (0, worktree_js_1.listWorktrees)();
    });
worktree
    .command("clean")
    .description("Remove merged and stale worktrees")
    .option("-f, --force", "Force remove all non-main worktrees")
    .option("-d, --days <n>", "Max age in days for stale worktrees", "7")
    .action((opts) => {
        (0, worktree_js_1.cleanWorktrees)({
            force: opts.force,
            maxAgeDays: parseInt(opts.days, 10),
        });
    });
// --- session (shortcut: worktree new + claude) ---
program
    .command("session [name]")
    .description("Create a worktree and start Claude Code in it")
    .action((name) => {
        (0, worktree_js_1.createWorktree)(name, { startClaude: true });
    });
// --- setup-claude ---
program
    .command("setup-claude [dir]")
    .description("Inject xbq instructions into CLAUDE.md (run in worktree root)")
    .option("--remove", "Remove xbq instructions from CLAUDE.md")
    .action((dir, opts) => {
        if (opts?.remove) {
            (0, setup_claude_js_1.removeClaude)(dir);
        }
        else {
            (0, setup_claude_js_1.setupClaude)(dir);
        }
    });
// --- clean ---
program
    .command("clean")
    .description("Clean old results and logs")
    .option("-d, --days <n>", "Remove results older than N days", "7")
    .action((opts) => {
        const maxAge = parseInt(opts.days, 10) * 86400 * 1000; // days -> ms
        const now = Date.now();
        let cleaned = 0;
        for (const dir of [utils_js_1.BQ_RESULTS_DIR, utils_js_1.BQ_LOGS_DIR]) {
            if (!(0, node_fs_1.existsSync)(dir))
                continue;
            for (const f of (0, node_fs_1.readdirSync)(dir)) {
                const path = (0, node_path_1.join)(dir, f);
                // Use the node:fs module imported at the top of the file rather
                // than calling require("node:fs") twice per iteration.
                const stat = (0, node_fs_1.statSync)(path);
                if (now - stat.mtimeMs > maxAge) {
                    (0, node_fs_1.unlinkSync)(path);
                    cleaned++;
                }
            }
        }
        utils_js_1.log.ok(`Cleaned ${cleaned} old files`);
    });
|
|
216
|
+
/**
 * Pretty-print a job result: colored summary line, up to ten build errors
 * and/or test failures (failed jobs only), and a pointer to the full log.
 */
function printResult(result) {
    // Print a section header followed by at most ten red-bulleted items.
    const listSection = (header, items) => {
        if (items.length === 0)
            return;
        console.log(header);
        for (const item of items.slice(0, 10)) {
            console.log(`    \x1b[31m\u2022\x1b[0m ${item}`);
        }
    };
    console.log();
    if (result.status === "passed") {
        utils_js_1.log.ok(result.summary);
    }
    else {
        // "failed" and any other non-passed status both use the error style;
        // only "failed" results carry error/failure details worth listing.
        utils_js_1.log.error(result.summary);
        if (result.status === "failed") {
            listSection("\n  Build errors:", result.build_errors);
            listSection("\n  Test failures:", result.failures);
        }
    }
    if (result.log_path) {
        console.log(`\n  Full log: xbq logs ${result.id}`);
    }
    console.log();
}
|
|
244
|
+
// Parse process.argv and dispatch to the matching subcommand action.
program.parse();
|
package/dist/config.d.ts
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import { type BQConfig } from "./types.js";
// Read the saved config file, overlaying its values on top of DEFAULT_CONFIG;
// returns pure defaults when no config file exists yet.
export declare function loadConfig(): BQConfig;
// Persist the config as pretty-printed JSON (config directories created first).
export declare function saveConfig(config: BQConfig): void;
// Resolve the configured main repo path (with ~ expansion); exits the
// process with an error when no repo has been configured via `xbq init`.
export declare function getMainRepo(): string;
/**
 * Auto-detect an .xcworkspace in the given directory.
 * Excludes project.xcworkspace (Xcode internal).
 */
export declare function detectWorkspace(repoPath: string): string | null;
/**
 * Derive a scheme name from a workspace name.
 * "Foo.xcworkspace" → "Foo"
 */
export declare function detectScheme(_repoPath: string, workspace: string): string;
// One-shot setup: record the main repo, auto-detect workspace/scheme, probe
// for the mcp backend, and save the resulting config.
export declare function initConfig(repoPath?: string): Promise<void>;
|
package/dist/config.js
ADDED
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.loadConfig = loadConfig;
|
|
4
|
+
exports.saveConfig = saveConfig;
|
|
5
|
+
exports.getMainRepo = getMainRepo;
|
|
6
|
+
exports.detectWorkspace = detectWorkspace;
|
|
7
|
+
exports.detectScheme = detectScheme;
|
|
8
|
+
exports.initConfig = initConfig;
|
|
9
|
+
const node_fs_1 = require("node:fs");
|
|
10
|
+
const utils_js_1 = require("./utils.js");
|
|
11
|
+
const types_js_1 = require("./types.js");
|
|
12
|
+
/**
 * Read the user's config file, falling back to defaults.
 * Keys missing from the saved file are filled in from DEFAULT_CONFIG.
 */
function loadConfig() {
    const defaults = { ...types_js_1.DEFAULT_CONFIG };
    if (!(0, node_fs_1.existsSync)(utils_js_1.BQ_CONFIG_PATH)) {
        return defaults;
    }
    const saved = JSON.parse((0, node_fs_1.readFileSync)(utils_js_1.BQ_CONFIG_PATH, "utf-8"));
    return { ...defaults, ...saved };
}
|
|
19
|
+
/**
 * Persist the config as pretty-printed JSON with a trailing newline,
 * creating the config directory tree first if needed.
 */
function saveConfig(config) {
    (0, utils_js_1.ensureDirs)();
    const serialized = JSON.stringify(config, null, 2) + "\n";
    (0, node_fs_1.writeFileSync)(utils_js_1.BQ_CONFIG_PATH, serialized);
}
|
|
23
|
+
/**
 * Resolve the configured main repo path (with ~ expansion).
 * Exits the process with an error when `xbq init` has not been run.
 */
function getMainRepo() {
    const { main_repo } = loadConfig();
    if (!main_repo) {
        utils_js_1.log.error("No main repo configured. Run 'xbq init' first.");
        process.exit(1);
    }
    return (0, utils_js_1.expandPath)(main_repo);
}
|
|
31
|
+
/**
 * Auto-detect an .xcworkspace in the given directory.
 * Excludes project.xcworkspace (Xcode internal).
 * Returns the first matching entry name, or null when the directory is
 * unreadable or contains no workspace.
 */
function detectWorkspace(repoPath) {
    let entries;
    try {
        entries = (0, node_fs_1.readdirSync)(repoPath);
    }
    catch {
        return null; // missing or unreadable directory
    }
    for (const entry of entries) {
        if (entry.endsWith(".xcworkspace") && entry !== "project.xcworkspace") {
            return entry;
        }
    }
    return null;
}
|
|
44
|
+
/**
 * Derive a scheme name from a workspace name.
 * "Foo.xcworkspace" → "Foo"
 *
 * Only the trailing ".xcworkspace" extension is removed. The previous
 * String.replace-based version stripped the FIRST occurrence of the
 * substring, which could mangle names containing ".xcworkspace" mid-string.
 * Names without the extension are returned unchanged (same as before).
 */
function detectScheme(_repoPath, workspace) {
    const ext = ".xcworkspace";
    return workspace.endsWith(ext) ? workspace.slice(0, -ext.length) : workspace;
}
|
|
51
|
+
/**
 * One-shot setup: record the main repo, auto-detect the workspace and scheme,
 * probe for the MCP backend, save the config, and print a summary.
 *
 * Repo resolution order: explicit argument, then previously saved value,
 * then the current directory (only if it contains a .git entry).
 * Exits the process with status 1 on any validation failure.
 */
async function initConfig(repoPath) {
    (0, utils_js_1.ensureDirs)();
    const config = loadConfig();
    // Resolve repo path
    let repo = repoPath || config.main_repo;
    if (!repo) {
        // Try current directory
        const cwd = process.cwd();
        if ((0, node_fs_1.existsSync)(`${cwd}/.git`)) {
            repo = cwd;
        }
        else {
            utils_js_1.log.error("Please provide the main repo path: xbq init <path>");
            process.exit(1);
        }
    }
    repo = (0, utils_js_1.expandPath)(repo);
    // Validate: the path must exist and be a git repo.
    if (!(0, node_fs_1.existsSync)(repo)) {
        utils_js_1.log.error(`Directory not found: ${repo}`);
        process.exit(1);
    }
    if (!(0, node_fs_1.existsSync)(`${repo}/.git`)) {
        utils_js_1.log.error(`Not a git repo: ${repo}`);
        process.exit(1);
    }
    config.main_repo = repo;
    // Auto-detect workspace (first non-internal .xcworkspace in the repo root)
    const workspace = detectWorkspace(repo);
    if (workspace) {
        config.workspace = workspace;
        utils_js_1.log.ok(`Found workspace: ${config.workspace}`);
    }
    else {
        utils_js_1.log.warn("No .xcworkspace found — set workspace manually in ~/.bq/config.json");
    }
    // Auto-detect scheme from workspace name
    if (config.workspace) {
        config.default_scheme = detectScheme(repo, config.workspace);
        utils_js_1.log.ok(`Default scheme: ${config.default_scheme}`);
    }
    // Detect backend: prefer mcp when `xcrun --find mcpbridge` succeeds,
    // otherwise fall back to plain xcodebuild.
    try {
        (0, utils_js_1.run)("xcrun --find mcpbridge", { quiet: true });
        config.backend = "mcp";
        utils_js_1.log.ok("Xcode MCP (mcpbridge) found — default backend: mcp");
    }
    catch {
        config.backend = "xcodebuild";
        utils_js_1.log.warn("mcpbridge not found — default backend: xcodebuild");
    }
    config.xcodebuild_fallback = true;
    config.git_restore_mtime = true;
    saveConfig(config);
    utils_js_1.log.ok(`Configuration saved to ${utils_js_1.BQ_CONFIG_PATH}`);
    console.log();
    console.log(`  Main repo: ${config.main_repo}`);
    console.log(`  Workspace: ${config.workspace || "(not detected)"}`);
    console.log(`  Scheme: ${config.default_scheme || "(not detected)"}`);
    console.log(`  Destination: ${config.default_destination}`);
    console.log(`  Backend: ${config.backend}`);
    console.log();
    utils_js_1.log.info("Next: run 'xbq daemon start' to start the build queue");
}
|
package/dist/daemon.d.ts
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
/**
 * Start the daemon in foreground (for development) or detect if already running.
 */
export declare function startDaemon(opts?: {
    foreground?: boolean;
}): Promise<void>;
// Send SIGTERM to the recorded daemon PID (if alive) and remove the PID file.
export declare function stopDaemon(): void;
// Print daemon liveness (from the PID file) plus active/queued job listings.
export declare function daemonStatus(): void;
|
package/dist/daemon.js
ADDED
|
@@ -0,0 +1,186 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.startDaemon = startDaemon;
|
|
4
|
+
exports.stopDaemon = stopDaemon;
|
|
5
|
+
exports.daemonStatus = daemonStatus;
|
|
6
|
+
const node_fs_1 = require("node:fs");
|
|
7
|
+
const node_path_1 = require("node:path");
|
|
8
|
+
const executor_js_1 = require("./executor.js");
|
|
9
|
+
const utils_js_1 = require("./utils.js");
|
|
10
|
+
const POLL_INTERVAL_MS = 1000;
|
|
11
|
+
/**
 * Start the daemon in foreground (for development) or detect if already running.
 *
 * Lifecycle: refuse to start when a live PID file exists, write our own PID,
 * requeue any jobs stranded in active/ by a previous crash, install
 * SIGINT/SIGTERM handlers that remove the PID file, then poll the queue
 * forever, processing one job at a time.
 *
 * Note: the `opts` parameter (incl. `foreground`) is not read in this body;
 * the loop always runs in the current process.
 */
async function startDaemon(opts = {}) {
    (0, utils_js_1.ensureDirs)();
    // Check if already running
    if ((0, node_fs_1.existsSync)(utils_js_1.BQ_PID_FILE)) {
        const existingPid = parseInt((0, node_fs_1.readFileSync)(utils_js_1.BQ_PID_FILE, "utf-8").trim());
        if ((0, utils_js_1.isProcessAlive)(existingPid)) {
            utils_js_1.log.warn(`Daemon already running (PID: ${existingPid})`);
            return;
        }
        // Stale PID file — the recorded process is gone; safe to take over.
        (0, node_fs_1.unlinkSync)(utils_js_1.BQ_PID_FILE);
    }
    // Write PID so other invocations (and stop/status) can find us.
    (0, node_fs_1.writeFileSync)(utils_js_1.BQ_PID_FILE, String(process.pid));
    utils_js_1.log.ok(`Daemon started (PID: ${process.pid})`);
    // Clean up stale active jobs left over from a crashed daemon.
    cleanupActive();
    // Handle signals for graceful shutdown: remove the PID file, then exit.
    const shutdown = () => {
        utils_js_1.log.info("Daemon shutting down...");
        try {
            (0, node_fs_1.unlinkSync)(utils_js_1.BQ_PID_FILE);
        }
        catch { }
        process.exit(0);
    };
    process.on("SIGINT", shutdown);
    process.on("SIGTERM", shutdown);
    // Main loop: serial queue — at most one job in flight, polled every second.
    while (true) {
        try {
            const job = pickNextJob();
            if (job) {
                await processJob(job);
            }
        }
        catch (err) {
            // Keep the daemon alive on unexpected errors; log and keep polling.
            utils_js_1.log.error(`Daemon error: ${err}`);
        }
        await sleep(POLL_INTERVAL_MS);
    }
}
|
|
56
|
+
/**
 * Signal the running daemon with SIGTERM and remove its PID file.
 * Safe to call when no daemon is running or the PID file is stale.
 */
function stopDaemon() {
    if (!(0, node_fs_1.existsSync)(utils_js_1.BQ_PID_FILE)) {
        utils_js_1.log.warn("Daemon is not running");
        return;
    }
    const recorded = (0, node_fs_1.readFileSync)(utils_js_1.BQ_PID_FILE, "utf-8");
    const pid = parseInt(recorded.trim());
    if ((0, utils_js_1.isProcessAlive)(pid)) {
        process.kill(pid, "SIGTERM");
        utils_js_1.log.ok(`Daemon stopped (PID: ${pid})`);
    }
    else {
        utils_js_1.log.warn("Daemon was not running (stale PID file)");
    }
    // Always clear the PID file, even when it was stale.
    try {
        (0, node_fs_1.unlinkSync)(utils_js_1.BQ_PID_FILE);
    }
    catch { }
}
|
|
74
|
+
/**
 * Report whether the daemon process is alive, then print the queue contents.
 * A PID file whose recorded process is gone is treated as stale and removed.
 */
function daemonStatus() {
    if (!(0, node_fs_1.existsSync)(utils_js_1.BQ_PID_FILE)) {
        utils_js_1.log.info("Daemon is not running");
        showQueueStatus();
        return;
    }
    const recordedPid = parseInt((0, node_fs_1.readFileSync)(utils_js_1.BQ_PID_FILE, "utf-8").trim());
    if ((0, utils_js_1.isProcessAlive)(recordedPid)) {
        utils_js_1.log.ok(`Daemon is running (PID: ${recordedPid})`);
    }
    else {
        utils_js_1.log.warn("Daemon PID file exists but process is not running");
        // Remove the stale PID file so the next start is clean.
        try {
            (0, node_fs_1.unlinkSync)(utils_js_1.BQ_PID_FILE);
        }
        catch { }
    }
    showQueueStatus();
}
|
|
93
|
+
/**
 * Print the currently running job(s) followed by the queued backlog.
 * Prints "Queue is empty" only when both listings are empty.
 */
function showQueueStatus() {
    // List the .json job files in a directory, tolerating a missing dir.
    const listJobFiles = (dir) => ((0, node_fs_1.existsSync)(dir)
        ? (0, node_fs_1.readdirSync)(dir).filter(f => f.endsWith(".json"))
        : []);
    // One-line description of where a job's sources come from.
    const describeSource = (job) => (job.snapshot_sha
        ? `snapshot ${job.snapshot_sha.slice(0, 8)}`
        : `branch ${job.branch}`);
    const queued = listJobFiles(utils_js_1.BQ_QUEUE_DIR);
    const active = listJobFiles(utils_js_1.BQ_ACTIVE_DIR);
    console.log();
    for (const f of active) {
        const job = JSON.parse((0, node_fs_1.readFileSync)((0, node_path_1.join)(utils_js_1.BQ_ACTIVE_DIR, f), "utf-8"));
        utils_js_1.log.status(`Running: ${job.action} on ${describeSource(job)} (${job.id})`);
    }
    if (queued.length > 0) {
        utils_js_1.log.info(`Queued: ${queued.length} job(s)`);
        for (const f of queued.sort()) {
            const job = JSON.parse((0, node_fs_1.readFileSync)((0, node_path_1.join)(utils_js_1.BQ_QUEUE_DIR, f), "utf-8"));
            console.log(`  - ${job.action} on ${describeSource(job)} (${job.id})`);
        }
    }
    else if (active.length === 0) {
        utils_js_1.log.info("Queue is empty");
    }
}
|
|
116
|
+
/**
 * Pick the oldest job from the queue (FIFO).
 * The chosen job file is moved into the active directory to claim it.
 * Returns the parsed job, or null when the queue is empty/missing.
 */
function pickNextJob() {
    if (!(0, node_fs_1.existsSync)(utils_js_1.BQ_QUEUE_DIR))
        return null;
    // IDs are timestamp-prefixed, so a lexicographic sort is chronological.
    const pending = (0, node_fs_1.readdirSync)(utils_js_1.BQ_QUEUE_DIR)
        .filter(name => name.endsWith(".json"))
        .sort();
    const oldest = pending[0];
    if (!oldest)
        return null;
    const queuedPath = (0, node_path_1.join)(utils_js_1.BQ_QUEUE_DIR, oldest);
    const job = JSON.parse((0, node_fs_1.readFileSync)(queuedPath, "utf-8"));
    // Claim the job by moving it from queue/ to active/.
    (0, node_fs_1.renameSync)(queuedPath, (0, node_path_1.join)(utils_js_1.BQ_ACTIVE_DIR, oldest));
    return job;
}
|
|
135
|
+
/**
 * Process a job and write the result.
 * A crash inside the executor is converted into an "error" result so the
 * waiting client still receives a result file; the job is then removed
 * from the active directory either way.
 */
async function processJob(job) {
    utils_js_1.log.info(`Processing job: ${job.id} (${job.action} on ${job.branch})`);
    const activeFile = (0, node_path_1.join)(utils_js_1.BQ_ACTIVE_DIR, `${job.id}.json`);
    // Synthesize a result object for an executor failure.
    const errorResult = (message) => ({
        id: job.id,
        status: "error",
        duration_seconds: 0,
        summary: message,
        failures: [],
        build_errors: [message],
        warnings: [],
        log_path: "",
    });
    let result;
    try {
        result = await (0, executor_js_1.executeJob)(job);
    }
    catch (err) {
        result = errorResult(err instanceof Error ? err.message : String(err));
    }
    // Write result for the waiting client, then release the active slot.
    (0, node_fs_1.writeFileSync)((0, node_path_1.join)(utils_js_1.BQ_RESULTS_DIR, `${job.id}.json`), JSON.stringify(result, null, 2) + "\n");
    try {
        (0, node_fs_1.unlinkSync)(activeFile);
    }
    catch { }
    const outcome = `Job ${job.id}: ${result.summary} (${result.duration_seconds}s)`;
    if (result.status === "passed") {
        utils_js_1.log.ok(outcome);
    }
    else {
        utils_js_1.log.error(outcome);
    }
}
|
|
172
|
+
/**
 * Clean up stale active jobs (from crashed daemon).
 * Each leftover job file in active/ is moved back into the queue so the
 * restarted daemon will pick it up again.
 */
function cleanupActive() {
    if (!(0, node_fs_1.existsSync)(utils_js_1.BQ_ACTIVE_DIR))
        return;
    const stale = (0, node_fs_1.readdirSync)(utils_js_1.BQ_ACTIVE_DIR).filter(name => name.endsWith(".json"));
    for (const name of stale) {
        utils_js_1.log.warn(`Requeuing stale active job: ${name}`);
        (0, node_fs_1.renameSync)((0, node_path_1.join)(utils_js_1.BQ_ACTIVE_DIR, name), (0, node_path_1.join)(utils_js_1.BQ_QUEUE_DIR, name));
    }
}
|
|
184
|
+
/** Promise-based delay used by the daemon's poll loop. */
function sleep(ms) {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { type JobResult } from "./types.js";
// Options accepted when enqueuing a job (mirrors the `xbq build`/`xbq test`
// CLI flags).
export interface EnqueueOptions {
    // Whether to enqueue a build job or a test job.
    action: "build" | "test";
    // Branch to build/test; optional when invoked from inside a worktree.
    branch?: string;
    // Xcode scheme (optional).
    scheme?: string;
    // Test plan name (test action only).
    testPlan?: string;
    // Simulator destination string,
    // e.g. 'platform=iOS Simulator,name=iPhone 16,OS=18.0'.
    destination?: string;
    // Which build backend to use.
    backend?: "mcp" | "xcodebuild";
    // Per-job timeout in seconds (the CLI defaults this to 1800).
    timeout?: number;
}
/**
 * Enqueue a job, wait for result, and return it.
 */
export declare function enqueueAndWait(opts: EnqueueOptions): Promise<JobResult>;
|