@love-moon/conductor-cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/conductor-chrome.js +376 -0
- package/bin/conductor-config.js +82 -0
- package/bin/conductor-daemon.js +67 -0
- package/bin/conductor-fire.js +903 -0
- package/package.json +34 -0
- package/src/daemon.js +376 -0
- package/src/fire/history.js +605 -0
- package/src/pageAutomation.js +131 -0
- package/src/providers/deepseek.js +405 -0
- package/src/providers/generic.js +6 -0
- package/src/providers/qwen.js +203 -0
package/package.json
ADDED
@@ -0,0 +1,34 @@
+{
+  "name": "@love-moon/conductor-cli",
+  "version": "0.1.0",
+  "type": "module",
+  "bin": {
+    "conductor-cli": "bin/conductor-fire.js",
+    "conductor-fire": "bin/conductor-fire.js",
+    "conductor-daemon": "bin/conductor-daemon.js",
+    "conductor-config": "bin/conductor-config.js"
+  },
+  "files": [
+    "bin",
+    "src"
+  ],
+  "publishConfig": {
+    "access": "public"
+  },
+  "scripts": {
+    "test": "node --test"
+  },
+  "dependencies": {
+    "@conductor/cli2sdk": "0.1.0",
+    "@conductor/sdk": "0.1.0",
+    "@modelcontextprotocol/sdk": "^1.20.2",
+    "@openai/codex-sdk": "^0.58.0",
+    "dotenv": "^16.4.5",
+    "enquirer": "^2.4.1",
+    "js-yaml": "^4.1.1",
+    "ws": "^8.18.0",
+    "yargs": "^17.7.2",
+    "chrome-launcher": "^1.2.1",
+    "chrome-remote-interface": "^0.33.0"
+  }
+}
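The daemon source below resolves its defaults from ~/.conductor/config.yaml. As a minimal sketch only, here is how such a file could be written with the js-yaml dependency declared above, assuming the key names referenced in src/daemon.js (backendUrl, agentToken, workspace) map directly onto the YAML; the exact schema expected by @conductor/sdk's loadConfig is not part of this diff, so the field names and values are illustrative, not authoritative.

// write-example-config.js — illustrative sketch; field names inferred from src/daemon.js
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import yaml from "js-yaml";

// Placeholder values, not defaults shipped by the package.
const exampleConfig = {
  backendUrl: "http://localhost:6152",      // read as fileConfig?.backendUrl in startDaemon
  agentToken: "replace-with-real-token",    // read as fileConfig?.agentToken in startDaemon
  workspace: path.join(os.homedir(), "ws"), // read as userConfig.workspace via getUserConfig()
};

const target = path.join(os.homedir(), ".conductor", "config.yaml");
fs.mkdirSync(path.dirname(target), { recursive: true });
fs.writeFileSync(target, yaml.dump(exampleConfig), "utf8");
console.log(`wrote ${target}`);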
package/src/daemon.js
ADDED
@@ -0,0 +1,376 @@
+import fs from "node:fs";
+import path from "node:path";
+import os from "node:os";
+import { spawn } from "node:child_process";
+import { fileURLToPath } from "node:url";
+
+import dotenv from "dotenv";
+import yaml from "js-yaml";
+
+import { ConductorWebSocketClient, ConductorConfig, loadConfig, ConfigFileNotFound } from "@conductor/sdk";
+
+dotenv.config();
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+const CLI_PATH = path.resolve(__dirname, "..", "bin", "conductor-fire.js");
+
+function log(message) {
+  const ts = new Date().toLocaleString("sv-SE", { timeZone: "Asia/Shanghai" }).replace(" ", "T");
+  process.stdout.write(`[conductor-daemon ${ts}] ${message}\n`);
+}
+
+function logError(message) {
+  const ts = new Date().toLocaleString("sv-SE", { timeZone: "Asia/Shanghai" }).replace(" ", "T");
+  process.stderr.write(`[conductor-daemon ${ts}] ${message}\n`);
+}
+
+function getUserConfig(configFilePath) {
+  try {
+    const home = os.homedir();
+    const configPath = configFilePath || path.join(home, ".conductor", "config.yaml");
+    if (fs.existsSync(configPath)) {
+      const content = fs.readFileSync(configPath, "utf8");
+      const parsed = yaml.load(content);
+      if (parsed && typeof parsed === "object") {
+        return parsed;
+      }
+    }
+  } catch (error) {
+    // ignore error
+  }
+  return {};
+}
+
+export function startDaemon(config = {}, deps = {}) {
+  let fileConfig;
+  try {
+    fileConfig = loadConfig(config.CONFIG_FILE);
+    log(`Loaded config from ${config.CONFIG_FILE || "~/.conductor/config.yaml"}`);
+  } catch (err) {
+    if (!(err instanceof ConfigFileNotFound)) {
+      log(`Failed to load config: ${err.message}`);
+    }
+  }
+
+  const userConfig = getUserConfig(config.CONFIG_FILE);
+  const explicitWsUrl =
+    config.BACKEND_URL ||
+    process.env.CONDUCTOR_BACKEND_WS_URL ||
+    process.env.CONDUCTOR_WS_URL ||
+    null;
+  const derivedHttpFromWs = explicitWsUrl ? deriveBackendHttpFromWebsocket(explicitWsUrl) : null;
+  const BACKEND_HTTP =
+    config.BACKEND_HTTP ||
+    process.env.CONDUCTOR_BACKEND_URL ||
+    derivedHttpFromWs ||
+    fileConfig?.backendUrl ||
+    "http://localhost:6152";
+  const BACKEND_URL =
+    explicitWsUrl ||
+    deriveWebsocketUrlFromHttp(BACKEND_HTTP);
+  const AGENT_TOKEN =
+    config.AGENT_TOKEN || process.env.CONDUCTOR_AGENT_TOKEN || fileConfig?.agentToken || "default-agent-token";
+  const AGENT_NAME = (config.NAME || process.env.CONDUCTOR_DAEMON_NAME || os.hostname()).trim();
+  if (!AGENT_NAME) {
+    logError("Daemon name is required. Set --name or CONDUCTOR_DAEMON_NAME.");
+    process.exit(1);
+  }
+  const WORKSPACE_ROOT =
+    config.WORKSPACE_ROOT ||
+    process.env.CONDUCTOR_WS ||
+    userConfig.workspace ||
+    path.join(process.env.HOME || "/tmp", "ws");
+  const CLI_PATH_VAL = config.CLI_PATH || CLI_PATH;
+
+  const spawnFn = deps.spawn || spawn;
+  const mkdirSyncFn = deps.mkdirSync || fs.mkdirSync;
+  const writeFileSyncFn = deps.writeFileSync || fs.writeFileSync;
+  const existsSyncFn = deps.existsSync || fs.existsSync;
+  const readFileSyncFn = deps.readFileSync || fs.readFileSync;
+  const unlinkSyncFn = deps.unlinkSync || fs.unlinkSync;
+  const createWriteStreamFn = deps.createWriteStream || fs.createWriteStream;
+  const fetchFn = deps.fetch || fetch;
+  const exitFn = deps.exit || process.exit;
+
+  try {
+    mkdirSyncFn(WORKSPACE_ROOT, { recursive: true });
+  } catch (err) {
+    logError(`Failed to create workspace root: ${err}`);
+    process.exit(1);
+  }
+
+  const LOCK_FILE = path.join(WORKSPACE_ROOT, "daemon.pid");
+  try {
+    if (existsSyncFn(LOCK_FILE)) {
+      const pid = parseInt(readFileSyncFn(LOCK_FILE, "utf-8"), 10);
+      if (!Number.isNaN(pid)) {
+        try {
+          process.kill(pid, 0);
+          logError(`Daemon already running with PID ${pid}`);
+          process.exit(1);
+        } catch (e) {
+          if (e.code === "ESRCH") {
+            log("Removing stale lock file");
+            unlinkSyncFn(LOCK_FILE);
+          } else {
+            logError(`Daemon already running with PID ${pid} (access denied)`);
+            process.exit(1);
+          }
+        }
+      }
+    }
+    writeFileSyncFn(LOCK_FILE, process.pid.toString());
+  } catch (err) {
+    logError(`Failed to acquire lock: ${err}`);
+    process.exit(1);
+  }
+
+  const cleanupLock = () => {
+    try {
+      if (existsSyncFn(LOCK_FILE)) {
+        const pid = parseInt(readFileSyncFn(LOCK_FILE, "utf-8"), 10);
+        if (pid === process.pid) {
+          unlinkSyncFn(LOCK_FILE);
+        }
+      }
+    } catch (e) {
+      // ignore
+    }
+  };
+
+  process.on("exit", cleanupLock);
+  process.on("SIGINT", () => {
+    cleanupLock();
+    process.exit();
+  });
+  process.on("SIGTERM", () => {
+    cleanupLock();
+    process.exit();
+  });
+  process.on("uncaughtException", (err) => {
+    logError(`Uncaught exception: ${err}`);
+    cleanupLock();
+    process.exit(1);
+  });
+
+  if (config.CLEAN_ALL) {
+    cleanAllAgents(BACKEND_HTTP, AGENT_TOKEN, fetchFn)
+      .then((result) => {
+        log(`Cleaned stale daemons: removed=${result.removed} remaining=${result.remaining}`);
+      })
+      .catch((err) => {
+        log(`Failed to clean daemons: ${err.message}`);
+      })
+      .finally(() => exitFn(0));
+    return { close: () => {} };
+  }
+
+  log("Daemon starting...");
+  log(`Backend: ${BACKEND_URL}`);
+  log(`Workspace: ${WORKSPACE_ROOT}`);
+  log(`CLI Path: ${CLI_PATH_VAL}`);
+  log(`Daemon Name: ${AGENT_NAME}`);
+
+  const sdkConfig = new ConductorConfig({
+    agentToken: AGENT_TOKEN,
+    backendUrl: BACKEND_HTTP,
+    websocketUrl: BACKEND_URL,
+  });
+
+  const client = new ConductorWebSocketClient(sdkConfig, {
+    extraHeaders: {
+      "x-conductor-host": AGENT_NAME,
+    },
+  });
+
+  client.registerHandler((payload) => {
+    handleEvent(payload);
+  });
+
+  client
+    .connect()
+    .then(() => {
+      log("Connected to backend");
+    })
+    .catch((err) => {
+      logError(`Failed to connect: ${err}`);
+    });
+
+  function handleEvent(event) {
+    if (event.type === "create_task") {
+      handleCreateTask(event.payload);
+    }
+  }
+
+  function handleCreateTask(payload) {
+    const { task_id: taskId, project_id: projectId, backend_type: backendType, initial_content: initialContent } =
+      payload || {};
+
+    if (!taskId || !projectId) {
+      logError(`Invalid create_task payload: ${JSON.stringify(payload)}`);
+      return;
+    }
+
+    log("");
+    log(`Creating task ${taskId} for project ${projectId} (${backendType})`);
+    client
+      .sendJson({
+        type: "task_status_update",
+        payload: {
+          task_id: taskId,
+          project_id: projectId,
+          status: "CREATED",
+        },
+      })
+      .catch((err) => {
+        logError(`Failed to report task status (CREATED) for ${taskId}: ${err?.message || err}`);
+      });
+
+    const taskDir = path.join(WORKSPACE_ROOT, projectId, taskId);
+    mkdirSyncFn(taskDir, { recursive: true });
+
+    const args = [];
+    if (backendType) {
+      args.push("--backend", backendType);
+    }
+    if (initialContent) {
+      args.push("--prefill", initialContent);
+    }
+    // Explicitly separate conductor flags from backend args so they don't leak into messages
+    args.push("--");
+
+    const logPath = path.join(taskDir, ".conductor.log");
+    let logStream;
+    try {
+      logStream = createWriteStreamFn(logPath, { flags: "a" });
+    } catch (err) {
+      logError(`Failed to open log file ${logPath}: ${err.message}`);
+    }
+
+    log(`New task workspace: ${taskDir}`);
+    log(`Logs: ${logPath}`);
+
+    const env = {
+      ...process.env,
+      CONDUCTOR_PROJECT_ID: projectId,
+      CONDUCTOR_TASK_ID: taskId,
+    };
+    if (config.CONFIG_FILE) {
+      env.CONDUCTOR_CONFIG = config.CONFIG_FILE;
+    }
+    if (AGENT_TOKEN) {
+      env.CONDUCTOR_AGENT_TOKEN = AGENT_TOKEN;
+    }
+    if (BACKEND_HTTP) {
+      env.CONDUCTOR_BACKEND_URL = BACKEND_HTTP;
+    }
+
+    const child = spawnFn(process.execPath, [CLI_PATH_VAL, ...args], {
+      cwd: taskDir,
+      env,
+      stdio: ["inherit", "pipe", "pipe"],
+    });
+    client
+      .sendJson({
+        type: "task_status_update",
+        payload: {
+          task_id: taskId,
+          project_id: projectId,
+          status: "RUNNING",
+        },
+      })
+      .catch((err) => {
+        logError(`Failed to report task status (RUNNING) for ${taskId}: ${err?.message || err}`);
+      });
+
+    if (child.stdout && typeof child.stdout.pipe === "function" && logStream) {
+      child.stdout.pipe(logStream, { end: false });
+    } else if (child.stdout && typeof child.stdout.on === "function" && logStream) {
+      child.stdout.on("data", (chunk) => logStream.write(chunk));
+    }
+    if (child.stderr && typeof child.stderr.pipe === "function" && logStream) {
+      child.stderr.pipe(logStream, { end: false });
+    } else if (child.stderr && typeof child.stderr.on === "function" && logStream) {
+      child.stderr.on("data", (chunk) => logStream.write(chunk));
+    }
+
+    child.on("error", (err) => {
+      logError(`Failed to spawn CLI: ${err.message}`);
+      if (logStream) {
+        const ts = new Date().toLocaleString("sv-SE", { timeZone: "Asia/Shanghai" }).replace(" ", "T");
+        logStream.write(`[daemon ${ts}] spawn error: ${err.message}\n`);
+      }
+    });
+
+    child.on("exit", (code) => {
+      if (logStream) {
+        const ts = new Date().toLocaleString("sv-SE", { timeZone: "Asia/Shanghai" }).replace(" ", "T");
+        logStream.write(`[daemon ${ts}] process exited with code ${code}\n`);
+        logStream.end();
+      }
+      log(`Task ${taskId} finished with code ${code}`);
+      log(`Logs: ${logPath}`);
+      const status = code === 0 ? "COMPLETED" : "FAILED";
+      client
+        .sendJson({
+          type: "task_status_update",
+          payload: {
+            task_id: taskId,
+            project_id: projectId,
+            status,
+            summary: code === 0 ? "completed" : `exited with code ${code}`,
+          },
+        })
+        .catch((err) => {
+          logError(`Failed to report task status (${status}) for ${taskId}: ${err?.message || err}`);
+        });
+    });
+  }
+
+  return {
+    close: () => {
+      client.disconnect();
+    },
+  };
+}
+
+async function cleanAllAgents(backendUrl, agentToken, fetchImpl) {
+  const fetchFn = fetchImpl || fetch;
+  const target = `${backendUrl.replace(/\/$/, "")}/agents/cleanup`;
+  const headers = {
+    Authorization: `Bearer ${agentToken}`,
+  };
+  const res = await fetchFn(target, { method: "GET", headers });
+  if (!res.ok) {
+    throw new Error(`cleanup failed: ${res.status} ${res.statusText}`);
+  }
+  return res.json();
+}
+
+function deriveBackendHttpFromWebsocket(wsUrl) {
+  try {
+    const url = new URL(wsUrl);
+    if (url.protocol === "ws:") {
+      url.protocol = "http:";
+    } else if (url.protocol === "wss:") {
+      url.protocol = "https:";
+    }
+    url.pathname = "/";
+    url.search = "";
+    url.hash = "";
+    return `${url.protocol}//${url.host}`;
+  } catch {
+    return null;
+  }
+}
+
+function deriveWebsocketUrlFromHttp(httpUrl) {
+  try {
+    const url = new URL(httpUrl);
+    const scheme = url.protocol === "https:" ? "wss" : "ws";
+    return `${scheme}://${url.host}/ws/agent`;
+  } catch {
+    return "ws://localhost:6152/ws/agent";
+  }
+}
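As the source above shows, startDaemon accepts a config object plus an optional deps object that can override spawn, the fs helpers, fetch, and process.exit, so the daemon loop can be exercised without touching the real filesystem or backend. A minimal programmatic-start sketch follows, assuming the deep import path src/daemon.js remains reachable (the package ships bin and src and declares no exports map); the supported entry point is still the conductor-daemon bin.

// run-daemon.js — illustrative sketch, not the package's documented usage
import os from "node:os";
import path from "node:path";
import { startDaemon } from "@love-moon/conductor-cli/src/daemon.js";

const daemon = startDaemon(
  {
    NAME: "example-host",                  // otherwise CONDUCTOR_DAEMON_NAME or os.hostname() is used
    BACKEND_HTTP: "http://localhost:6152", // the ws URL is derived as ws://localhost:6152/ws/agent
    WORKSPACE_ROOT: path.join(os.homedir(), "ws"),
  },
  {
    // Optional dependency injection for tests, mirroring the deps read in startDaemon:
    // spawn, mkdirSync, writeFileSync, existsSync, readFileSync, unlinkSync,
    // createWriteStream, fetch, exit
  },
);

// Later, on shutdown; close() disconnects the ConductorWebSocketClient.
daemon.close();

Passing fakes through deps is how the task spawning and status-update paths above could be driven from node --test, the test script declared in package.json.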