chainlesschain 0.38.1 → 0.40.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,474 @@
1
+ /**
2
+ * ChainlessChain WebSocket Server
3
+ *
4
+ * Exposes CLI commands over WebSocket for remote access by IDE plugins,
5
+ * web frontends, automation scripts, etc. Commands are executed by spawning
6
+ * child processes — all 60+ CLI commands are available immediately.
7
+ */
8
+
9
+ import { EventEmitter } from "node:events";
10
+ import { spawn } from "node:child_process";
11
+ import { fileURLToPath } from "node:url";
12
+ import { dirname, join } from "node:path";
13
+ import { WebSocketServer } from "ws";
14
+
15
// Reconstruct __filename/__dirname, which ES modules do not provide natively.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

/** Absolute path to the CLI entry point that is spawned for each command */
const BIN_PATH = join(__dirname, "..", "..", "bin", "chainlesschain.js");

/**
 * Commands that must not be executed via WebSocket: they are either
 * interactive REPLs (chat, agent, setup) or would recursively start
 * another server (serve).
 */
const BLOCKED_COMMANDS = new Set(["serve", "chat", "agent", "setup"]);

/** Heartbeat interval (ms) between ping sweeps that detect dead connections */
const HEARTBEAT_INTERVAL = 30_000;
26
+
27
/**
 * Split a raw command string into argv-style tokens.
 *
 * Double- and single-quoted segments are kept as single tokens (quotes
 * stripped); inside double quotes a backslash escapes the next character.
 * No shell is ever involved — the result is passed straight to spawn().
 *
 * @param {string} input - Raw command line, e.g. `wallet send "my note"`.
 * @returns {string[]} Tokenized arguments.
 */
export function tokenizeCommand(input) {
  const tokens = [];
  let buf = "";
  let quote = null; // active quote character: '"', "'", or null
  let escaped = false; // previous char was a backslash inside double quotes

  for (let i = 0; i < input.length; i += 1) {
    const ch = input[i];

    if (escaped) {
      // The character following a backslash is taken literally.
      buf += ch;
      escaped = false;
    } else if (ch === "\\" && quote === '"') {
      // Backslash only acts as an escape inside double quotes.
      escaped = true;
    } else if ((ch === '"' || ch === "'") && (quote === null || quote === ch)) {
      // Opening or closing quote; the other quote style nested inside
      // an active quote is treated as a literal character.
      quote = quote === ch ? null : ch;
    } else if ((ch === " " || ch === "\t") && quote === null) {
      // Unquoted whitespace terminates the current token.
      if (buf.length > 0) {
        tokens.push(buf);
        buf = "";
      }
    } else {
      buf += ch;
    }
  }

  if (buf.length > 0) {
    tokens.push(buf);
  }
  return tokens;
}
70
+
71
/**
 * WebSocket bridge that exposes the ChainlessChain CLI to remote clients.
 *
 * Clients send JSON messages ({ id, type, ... }); commands are executed by
 * spawning the CLI as a child process. Supports optional token
 * authentication, buffered ("execute") and streaming ("stream") execution,
 * cancellation, per-command timeouts, and a ping/pong heartbeat.
 */
export class ChainlessChainWSServer extends EventEmitter {
  /**
   * @param {object} options
   * @param {number} [options.port=18800]
   * @param {string} [options.host="127.0.0.1"]
   * @param {string} [options.token] - If set, clients must authenticate first
   * @param {number} [options.maxConnections=10]
   * @param {number} [options.timeout=30000] - Command execution timeout (ms)
   */
  constructor(options = {}) {
    super();
    this.port = options.port || 18800;
    this.host = options.host || "127.0.0.1";
    this.token = options.token || null;
    this.maxConnections = options.maxConnections || 10;
    this.timeout = options.timeout || 30000;

    /** @type {WebSocketServer|null} */
    this.wss = null;

    /** Connected clients: clientId → { ws, authenticated, connectedAt, ip, alive } */
    this.clients = new Map();

    /** Running child processes: requestId → ChildProcess */
    this.processes = new Map();

    this._heartbeatTimer = null;
    this._clientCounter = 0;
  }

  /**
   * Start the WebSocket server.
   * @returns {Promise<void>} Resolves once the server is listening.
   */
  start() {
    return new Promise((resolve, reject) => {
      this.wss = new WebSocketServer({
        port: this.port,
        host: this.host,
      });

      this.wss.on("listening", () => {
        this._startHeartbeat();
        this.emit("listening", { port: this.port, host: this.host });
        resolve();
      });

      this.wss.on("error", (err) => {
        this.emit("error", err);
        reject(err);
      });

      this.wss.on("connection", (ws, req) => this._handleConnection(ws, req));
    });
  }

  /** Stop the server: kill running commands, close clients, then shut down. */
  async stop() {
    if (this._heartbeatTimer) {
      clearInterval(this._heartbeatTimer);
      this._heartbeatTimer = null;
    }

    // Kill all running child processes (best-effort)
    for (const [id, child] of this.processes) {
      try {
        child.kill("SIGTERM");
      } catch (_err) {
        // Process may have already exited
      }
      this.processes.delete(id);
    }

    // Close all client connections
    for (const [, client] of this.clients) {
      try {
        client.ws.close(1001, "Server shutting down");
      } catch (_err) {
        // Connection may already be closed
      }
    }
    this.clients.clear();

    // Close the server
    if (this.wss) {
      await new Promise((resolve) => {
        this.wss.close(() => resolve());
      });
      this.wss = null;
    }

    this.emit("stopped");
  }

  /**
   * Register a newly connected client and wire up its socket events.
   * @private
   */
  _handleConnection(ws, req) {
    if (this.clients.size >= this.maxConnections) {
      ws.close(1013, "Max connections reached");
      return;
    }

    const clientId = `client-${++this._clientCounter}`;
    const clientIp =
      req.socket.remoteAddress || req.headers["x-forwarded-for"] || "unknown";

    this.clients.set(clientId, {
      ws,
      authenticated: !this.token, // If no token required, auto-authenticated
      connectedAt: Date.now(),
      ip: clientIp,
      alive: true,
    });

    this.emit("connection", { clientId, ip: clientIp });

    ws.on("message", (data) => {
      try {
        const message = JSON.parse(data.toString("utf8"));
        this._handleMessage(clientId, ws, message);
      } catch (_err) {
        this._send(ws, {
          type: "error",
          code: "INVALID_JSON",
          message: "Failed to parse message as JSON",
        });
      }
    });

    ws.on("close", () => {
      this.clients.delete(clientId);
      this.emit("disconnection", { clientId });
    });

    ws.on("pong", () => {
      const client = this.clients.get(clientId);
      if (client) client.alive = true;
    });
  }

  /**
   * Dispatch one parsed client message by its "type" field.
   * @private
   */
  _handleMessage(clientId, ws, message) {
    const { id, type } = message;

    if (!id) {
      this._send(ws, {
        type: "error",
        code: "MISSING_ID",
        message: 'Message must include an "id" field',
      });
      return;
    }

    // Check authentication. The client record may already have been reaped
    // by the heartbeat sweep; treat a missing record as unauthenticated
    // rather than crashing on a null dereference.
    const client = this.clients.get(clientId);
    if (this.token && !client?.authenticated && type !== "auth") {
      this._send(ws, {
        id,
        type: "error",
        code: "AUTH_REQUIRED",
        message: "Authentication required. Send an auth message first.",
      });
      return;
    }

    switch (type) {
      case "auth":
        this._handleAuth(clientId, ws, message);
        break;
      case "ping":
        this._send(ws, { id, type: "pong", serverTime: Date.now() });
        break;
      case "execute":
        this._executeCommand(id, ws, message.command, false);
        break;
      case "stream":
        this._executeCommand(id, ws, message.command, true);
        break;
      case "cancel":
        this._cancelRequest(id, ws);
        break;
      default:
        this._send(ws, {
          id,
          type: "error",
          code: "UNKNOWN_TYPE",
          message: `Unknown message type: ${type}`,
        });
    }
  }

  /**
   * Validate the client's token and mark it authenticated on success;
   * disconnect shortly after a failed attempt.
   * @private
   */
  _handleAuth(clientId, ws, message) {
    const { id, token } = message;
    // NOTE(review): === is not a timing-safe comparison; if the token must
    // resist timing probes, switch to crypto.timingSafeEqual.
    const success = token === this.token;
    const client = this.clients.get(clientId);

    if (success && client) {
      client.authenticated = true;
    }

    this._send(ws, {
      id,
      type: "auth-result",
      success,
      ...(success ? {} : { message: "Invalid token" }),
    });

    if (!success) {
      // Disconnect after failed auth
      setTimeout(() => ws.close(4001, "Authentication failed"), 100);
    }
  }

  /**
   * Spawn the CLI with the tokenized command and relay its output.
   *
   * @param {string} id - Request id; also keys the running-process map.
   * @param {object} ws - Client socket to respond on.
   * @param {string} command - Raw command line (tokenized, never shelled).
   * @param {boolean} stream - true: forward chunks as they arrive;
   *   false: buffer stdout/stderr and send a single result.
   * @private
   */
  _executeCommand(id, ws, command, stream) {
    if (!command || typeof command !== "string") {
      this._send(ws, {
        id,
        type: "error",
        code: "INVALID_COMMAND",
        message: "Command must be a non-empty string",
      });
      return;
    }

    const args = tokenizeCommand(command.trim());
    if (args.length === 0) {
      this._send(ws, {
        id,
        type: "error",
        code: "INVALID_COMMAND",
        message: "Empty command",
      });
      return;
    }

    // Block dangerous/interactive commands
    const baseCmd = args[0];
    if (BLOCKED_COMMANDS.has(baseCmd)) {
      this._send(ws, {
        id,
        type: "error",
        code: "COMMAND_BLOCKED",
        message: `Command "${baseCmd}" cannot be executed via WebSocket (interactive or recursive)`,
      });
      return;
    }

    // No shell: args go straight to the node binary, so no injection risk.
    const child = spawn(process.execPath, [BIN_PATH, ...args], {
      env: {
        ...process.env,
        FORCE_COLOR: "0",
        NO_SPINNER: "1",
      },
      stdio: ["pipe", "pipe", "pipe"],
      windowsHide: true,
    });

    this.processes.set(id, child);
    this.emit("command:start", { id, command, stream });

    // Timeout handling: kill the child and send a terminal error. Removing
    // the id from this.processes marks the request as finalized so the
    // subsequent "close" event does not send a second terminal message.
    const timer = setTimeout(() => {
      if (this.processes.has(id)) {
        try {
          child.kill("SIGTERM");
        } catch (_err) {
          // Process may have already exited
        }
        this.processes.delete(id);
        this._send(ws, {
          id,
          type: "error",
          code: "COMMAND_TIMEOUT",
          message: `Command timed out after ${this.timeout}ms`,
        });
      }
    }, this.timeout);

    if (stream) {
      // Stream mode: send chunks as they arrive
      child.stdout.on("data", (data) => {
        this._send(ws, {
          id,
          type: "stream-data",
          channel: "stdout",
          data: data.toString("utf8"),
        });
      });

      child.stderr.on("data", (data) => {
        this._send(ws, {
          id,
          type: "stream-data",
          channel: "stderr",
          data: data.toString("utf8"),
        });
      });

      child.on("close", (exitCode) => {
        clearTimeout(timer);
        // delete() returns false when timeout/cancel already finalized the
        // request — skip, so the client never sees two terminal messages.
        if (!this.processes.delete(id)) return;
        this._send(ws, {
          id,
          type: "stream-end",
          exitCode: exitCode ?? 1,
        });
        this.emit("command:end", { id, exitCode });
      });
    } else {
      // Buffered mode: collect all output then send result
      const stdoutChunks = [];
      const stderrChunks = [];

      child.stdout.on("data", (data) => stdoutChunks.push(data));
      child.stderr.on("data", (data) => stderrChunks.push(data));

      child.on("close", (exitCode) => {
        clearTimeout(timer);
        // Skip if timeout/cancel already sent a terminal message for this id.
        if (!this.processes.delete(id)) return;

        const stdout = Buffer.concat(stdoutChunks).toString("utf8");
        const stderr = Buffer.concat(stderrChunks).toString("utf8");

        this._send(ws, {
          id,
          type: "result",
          success: exitCode === 0,
          exitCode: exitCode ?? 1,
          stdout,
          stderr,
        });
        this.emit("command:end", { id, exitCode });
      });
    }

    child.on("error", (err) => {
      clearTimeout(timer);
      // A spawn failure can be followed by a "close" event; finalizing here
      // (delete returns true only once) guarantees a single error response.
      if (!this.processes.delete(id)) return;
      this._send(ws, {
        id,
        type: "error",
        code: "SPAWN_ERROR",
        message: err.message,
      });
    });
  }

  /**
   * Cancel a running command by request id, replying with a synthetic
   * failed result (or NOT_FOUND when nothing is running under that id).
   * @private
   */
  _cancelRequest(id, ws) {
    const child = this.processes.get(id);
    if (child) {
      try {
        child.kill("SIGTERM");
      } catch (_err) {
        // Process may have already exited
      }
      // Finalize now; the child's later "close" event is suppressed by the
      // processes-map guard in _executeCommand.
      this.processes.delete(id);
      this._send(ws, {
        id,
        type: "result",
        success: false,
        exitCode: -1,
        stdout: "",
        stderr: "Cancelled by client",
      });
    } else {
      this._send(ws, {
        id,
        type: "error",
        code: "NOT_FOUND",
        message: `No running command with id "${id}"`,
      });
    }
  }

  /**
   * Ping/pong heartbeat: terminate any client that failed to pong since
   * the previous sweep, then ping the rest.
   * @private
   */
  _startHeartbeat() {
    this._heartbeatTimer = setInterval(() => {
      for (const [clientId, client] of this.clients) {
        if (!client.alive) {
          client.ws.terminate();
          this.clients.delete(clientId);
          this.emit("disconnection", { clientId, reason: "heartbeat timeout" });
          continue;
        }
        client.alive = false;
        try {
          client.ws.ping();
        } catch (_err) {
          // Connection may be closing
        }
      }
    }, HEARTBEAT_INTERVAL);
  }

  /**
   * JSON-serialize and send, silently dropping the message if the socket
   * is not open or closes mid-send.
   * @private
   */
  _send(ws, data) {
    if (ws.readyState === ws.OPEN) {
      try {
        ws.send(JSON.stringify(data));
      } catch (_err) {
        // Connection may have just closed
      }
    }
  }
}
@@ -35,6 +35,10 @@ import {
35
35
  import { storeMemory, consolidateMemory } from "../lib/hierarchical-memory.js";
36
36
  import { CLIContextEngineering } from "../lib/cli-context-engineering.js";
37
37
  import { createChatFn } from "../lib/cowork-adapter.js";
38
+ import {
39
+ detectTaskType,
40
+ selectModelForTask,
41
+ } from "../lib/task-model-selector.js";
38
42
  import { executeHooks, HookEvents } from "../lib/hook-manager.js";
39
43
  import { CLIPermanentMemory } from "../lib/permanent-memory.js";
40
44
  import { CLIAutonomousAgent, GoalStatus } from "../lib/autonomous-agent.js";
@@ -565,6 +569,7 @@ async function chatWithTools(rawMessages, options) {
565
569
  dashscope: "https://dashscope.aliyuncs.com/compatible-mode/v1",
566
570
  mistral: "https://api.mistral.ai/v1",
567
571
  gemini: "https://generativelanguage.googleapis.com/v1beta/openai",
572
+ volcengine: "https://ark.cn-beijing.volces.com/api/v3",
568
573
  };
569
574
 
570
575
  const providerApiKeyEnvs = {
@@ -573,6 +578,7 @@ async function chatWithTools(rawMessages, options) {
573
578
  dashscope: "DASHSCOPE_API_KEY",
574
579
  mistral: "MISTRAL_API_KEY",
575
580
  gemini: "GEMINI_API_KEY",
581
+ volcengine: "VOLCENGINE_API_KEY",
576
582
  };
577
583
 
578
584
  const url =
@@ -582,7 +588,7 @@ async function chatWithTools(rawMessages, options) {
582
588
 
583
589
  if (!url) {
584
590
  throw new Error(
585
- `Unsupported provider: ${provider}. Supported: ollama, anthropic, openai, deepseek, dashscope, mistral, gemini`,
591
+ `Unsupported provider: ${provider}. Supported: ollama, anthropic, openai, deepseek, dashscope, mistral, gemini, volcengine`,
586
592
  );
587
593
  }
588
594
 
@@ -596,6 +602,7 @@ async function chatWithTools(rawMessages, options) {
596
602
  dashscope: "qwen-turbo",
597
603
  mistral: "mistral-large-latest",
598
604
  gemini: "gemini-2.0-flash",
605
+ volcengine: "doubao-seed-1-6-251015",
599
606
  };
600
607
 
601
608
  const response = await fetch(`${url}/chat/completions`, {
@@ -769,7 +776,7 @@ export async function startAgentRepl(options = {}) {
769
776
  let model = options.model || "qwen2:7b";
770
777
  let provider = options.provider || "ollama";
771
778
  const baseUrl = options.baseUrl || "http://localhost:11434";
772
- const apiKey = options.apiKey || process.env.OPENAI_API_KEY;
779
+ const apiKey = options.apiKey || null;
773
780
 
774
781
  // Bootstrap runtime (best-effort, DB not required)
775
782
  let db = null;
@@ -974,6 +981,7 @@ export async function startAgentRepl(options = {}) {
974
981
  "dashscope",
975
982
  "mistral",
976
983
  "gemini",
984
+ "volcengine",
977
985
  ];
978
986
  if (supported.includes(arg)) {
979
987
  provider = arg;
@@ -987,7 +995,7 @@ export async function startAgentRepl(options = {}) {
987
995
  logger.info(`Current provider: ${chalk.cyan(provider)}`);
988
996
  logger.info(
989
997
  chalk.gray(
990
- "Available: ollama, anthropic, openai, deepseek, dashscope, mistral, gemini",
998
+ "Available: ollama, anthropic, openai, deepseek, dashscope, mistral, gemini, volcengine",
991
999
  ),
992
1000
  );
993
1001
  }
@@ -1556,11 +1564,24 @@ export async function startAgentRepl(options = {}) {
1556
1564
  // Add user message
1557
1565
  messages.push({ role: "user", content: trimmed });
1558
1566
 
1567
+ // Auto-select best model based on task type
1568
+ let activeModel = model;
1569
+ const taskDetection = detectTaskType(trimmed);
1570
+ if (taskDetection.confidence > 0.3) {
1571
+ const recommended = selectModelForTask(provider, taskDetection.taskType);
1572
+ if (recommended && recommended !== activeModel) {
1573
+ activeModel = recommended;
1574
+ logger.info(
1575
+ chalk.gray(`[auto] ${taskDetection.name} → ${activeModel}`),
1576
+ );
1577
+ }
1578
+ }
1579
+
1559
1580
  try {
1560
1581
  process.stdout.write("\n");
1561
1582
  const response = await agentLoop(messages, {
1562
1583
  provider,
1563
- model,
1584
+ model: activeModel,
1564
1585
  baseUrl,
1565
1586
  apiKey,
1566
1587
  contextEngine,
@@ -11,6 +11,7 @@
11
11
  import readline from "readline";
12
12
  import chalk from "chalk";
13
13
  import { logger } from "../lib/logger.js";
14
+ import { BUILT_IN_PROVIDERS } from "../lib/llm-providers.js";
14
15
 
15
16
  const SLASH_COMMANDS = {
16
17
  "/exit": "Exit the chat",
@@ -128,7 +129,7 @@ export async function startChatRepl(options = {}) {
128
129
  let model = options.model || "qwen2:7b";
129
130
  let provider = options.provider || "ollama";
130
131
  const baseUrl = options.baseUrl || "http://localhost:11434";
131
- const apiKey = options.apiKey || process.env.OPENAI_API_KEY;
132
+ const apiKey = options.apiKey || null;
132
133
 
133
134
  const messages = [];
134
135
 
@@ -236,14 +237,21 @@ export async function startChatRepl(options = {}) {
236
237
 
237
238
  if (provider === "ollama") {
238
239
  response = await streamOllama(messages, model, baseUrl, onToken);
239
- } else if (provider === "openai") {
240
+ } else {
241
+ // OpenAI-compatible providers (openai, volcengine, deepseek, dashscope, mistral, gemini, anthropic-proxy)
242
+ const providerDef = BUILT_IN_PROVIDERS[provider];
240
243
  const url =
241
244
  baseUrl !== "http://localhost:11434"
242
245
  ? baseUrl
243
- : "https://api.openai.com/v1";
244
- response = await streamOpenAI(messages, model, url, apiKey, onToken);
245
- } else {
246
- throw new Error(`Unsupported provider: ${provider}`);
246
+ : providerDef?.baseUrl || "https://api.openai.com/v1";
247
+ const key =
248
+ apiKey ||
249
+ (providerDef?.apiKeyEnv ? process.env[providerDef.apiKeyEnv] : null);
250
+ if (!key)
251
+ throw new Error(
252
+ `API key required for ${provider} (set ${providerDef?.apiKeyEnv || "API key"})`,
253
+ );
254
+ response = await streamOpenAI(messages, model, url, key, onToken);
247
255
  }
248
256
 
249
257
  process.stdout.write("\n\n");