quadwork 1.2.3 → 1.2.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. package/out/404.html +1 -1
  2. package/out/__next.__PAGE__.txt +1 -1
  3. package/out/__next._full.txt +1 -1
  4. package/out/__next._head.txt +1 -1
  5. package/out/__next._index.txt +1 -1
  6. package/out/__next._tree.txt +1 -1
  7. package/out/_not-found/__next._full.txt +1 -1
  8. package/out/_not-found/__next._head.txt +1 -1
  9. package/out/_not-found/__next._index.txt +1 -1
  10. package/out/_not-found/__next._not-found.__PAGE__.txt +1 -1
  11. package/out/_not-found/__next._not-found.txt +1 -1
  12. package/out/_not-found/__next._tree.txt +1 -1
  13. package/out/_not-found.html +1 -1
  14. package/out/_not-found.txt +1 -1
  15. package/out/app-shell/__next._full.txt +1 -1
  16. package/out/app-shell/__next._head.txt +1 -1
  17. package/out/app-shell/__next._index.txt +1 -1
  18. package/out/app-shell/__next._tree.txt +1 -1
  19. package/out/app-shell/__next.app-shell.__PAGE__.txt +1 -1
  20. package/out/app-shell/__next.app-shell.txt +1 -1
  21. package/out/app-shell.html +1 -1
  22. package/out/app-shell.txt +1 -1
  23. package/out/index.html +1 -1
  24. package/out/index.txt +1 -1
  25. package/out/project/_/__next._full.txt +1 -1
  26. package/out/project/_/__next._head.txt +1 -1
  27. package/out/project/_/__next._index.txt +1 -1
  28. package/out/project/_/__next._tree.txt +1 -1
  29. package/out/project/_/__next.project.$d$id.__PAGE__.txt +1 -1
  30. package/out/project/_/__next.project.$d$id.txt +1 -1
  31. package/out/project/_/__next.project.txt +1 -1
  32. package/out/project/_/memory/__next._full.txt +1 -1
  33. package/out/project/_/memory/__next._head.txt +1 -1
  34. package/out/project/_/memory/__next._index.txt +1 -1
  35. package/out/project/_/memory/__next._tree.txt +1 -1
  36. package/out/project/_/memory/__next.project.$d$id.memory.__PAGE__.txt +1 -1
  37. package/out/project/_/memory/__next.project.$d$id.memory.txt +1 -1
  38. package/out/project/_/memory/__next.project.$d$id.txt +1 -1
  39. package/out/project/_/memory/__next.project.txt +1 -1
  40. package/out/project/_/memory.html +1 -1
  41. package/out/project/_/memory.txt +1 -1
  42. package/out/project/_/queue/__next._full.txt +1 -1
  43. package/out/project/_/queue/__next._head.txt +1 -1
  44. package/out/project/_/queue/__next._index.txt +1 -1
  45. package/out/project/_/queue/__next._tree.txt +1 -1
  46. package/out/project/_/queue/__next.project.$d$id.queue.__PAGE__.txt +1 -1
  47. package/out/project/_/queue/__next.project.$d$id.queue.txt +1 -1
  48. package/out/project/_/queue/__next.project.$d$id.txt +1 -1
  49. package/out/project/_/queue/__next.project.txt +1 -1
  50. package/out/project/_/queue.html +1 -1
  51. package/out/project/_/queue.txt +1 -1
  52. package/out/project/_.html +1 -1
  53. package/out/project/_.txt +1 -1
  54. package/out/settings/__next._full.txt +1 -1
  55. package/out/settings/__next._head.txt +1 -1
  56. package/out/settings/__next._index.txt +1 -1
  57. package/out/settings/__next._tree.txt +1 -1
  58. package/out/settings/__next.settings.__PAGE__.txt +1 -1
  59. package/out/settings/__next.settings.txt +1 -1
  60. package/out/settings.html +1 -1
  61. package/out/settings.txt +1 -1
  62. package/out/setup/__next._full.txt +1 -1
  63. package/out/setup/__next._head.txt +1 -1
  64. package/out/setup/__next._index.txt +1 -1
  65. package/out/setup/__next._tree.txt +1 -1
  66. package/out/setup/__next.setup.__PAGE__.txt +1 -1
  67. package/out/setup/__next.setup.txt +1 -1
  68. package/out/setup.html +1 -1
  69. package/out/setup.txt +1 -1
  70. package/package.json +1 -1
  71. package/server/agentchattr-registry.js +66 -0
  72. package/server/index.js +179 -7
  73. package/server/queue-watcher.js +125 -0
  74. package/server/routes.js +38 -0
  75. /package/out/_next/static/{6W2vNw7Pp8z2_l_OJ2hqC → Cpy01wZHv0vXd_j_HlrSf}/_buildManifest.js +0 -0
  76. /package/out/_next/static/{6W2vNw7Pp8z2_l_OJ2hqC → Cpy01wZHv0vXd_j_HlrSf}/_clientMiddlewareManifest.js +0 -0
  77. /package/out/_next/static/{6W2vNw7Pp8z2_l_OJ2hqC → Cpy01wZHv0vXd_j_HlrSf}/_ssgManifest.js +0 -0
package/server/index.js CHANGED
@@ -8,7 +8,8 @@ const pty = require("node-pty");
8
8
  const { spawn } = require("child_process");
9
9
  const { readConfig, resolveAgentCwd, resolveAgentCommand, resolveProjectChattr, resolveChattrSpawn, syncChattrToken, CONFIG_PATH } = require("./config");
10
10
  const routes = require("./routes");
11
- const { waitForAgentChattrReady, registerAgent, deregisterAgent } = require("./agentchattr-registry");
11
+ const { waitForAgentChattrReady, registerAgent, deregisterAgent, startHeartbeat, stopHeartbeat } = require("./agentchattr-registry");
12
+ const { startQueueWatcher, stopQueueWatcher } = require("./queue-watcher");
12
13
 
13
14
  const net = require("net");
14
15
  const config = readConfig();
@@ -146,14 +147,20 @@ function startMcpProxy(projectId, agentId, upstreamUrl, token) {
146
147
  const existing = mcpProxies.get(key);
147
148
  if (existing) return Promise.resolve(`http://127.0.0.1:${existing.port}/mcp`);
148
149
 
150
+ // #394 / quadwork#253: token is mutable so the 409 recovery path can
151
+ // swap it via updateMcpProxyToken without rebinding the listener —
152
+ // Codex was launched with a fixed proxy URL on an ephemeral port and
153
+ // can't be told to use a new one mid-flight.
154
+ const tokenRef = { current: token };
149
155
  return new Promise((resolve, reject) => {
150
156
  const proxyServer = http.createServer((req, res) => {
151
157
  const parsedUrl = new URL(req.url, `http://127.0.0.1`);
152
158
  const targetUrl = `${upstreamUrl}${parsedUrl.pathname}${parsedUrl.search}`;
153
159
  const headers = { ...req.headers, host: new URL(upstreamUrl).host };
154
- if (token) {
155
- headers["authorization"] = `Bearer ${token}`;
156
- headers["x-agent-token"] = token;
160
+ const tok = tokenRef.current;
161
+ if (tok) {
162
+ headers["authorization"] = `Bearer ${tok}`;
163
+ headers["x-agent-token"] = tok;
157
164
  }
158
165
  delete headers["content-length"];
159
166
 
@@ -180,12 +187,27 @@ function startMcpProxy(projectId, agentId, upstreamUrl, token) {
180
187
  proxyServer.on("error", (err) => reject(err));
181
188
  proxyServer.listen(0, "127.0.0.1", () => {
182
189
  const port = proxyServer.address().port;
183
- mcpProxies.set(key, { server: proxyServer, port });
190
+ mcpProxies.set(key, { server: proxyServer, port, tokenRef });
184
191
  resolve(`http://127.0.0.1:${port}/mcp`);
185
192
  });
186
193
  });
187
194
  }
188
195
 
196
+ /**
197
+ * Swap the bearer token of a running MCP proxy in place. Used by the
198
+ * sub-D 409 recovery path: rebinding the listener would change the
199
+ * ephemeral port and the running Codex process is pinned to the
200
+ * original URL, so we mutate the closure-captured tokenRef instead.
201
+ * Returns true if a proxy existed and was updated.
202
+ */
203
+ function updateMcpProxyToken(projectId, agentId, newToken) {
204
+ const key = `${projectId}/${agentId}`;
205
+ const proxy = mcpProxies.get(key);
206
+ if (!proxy || !proxy.tokenRef) return false;
207
+ proxy.tokenRef.current = newToken;
208
+ return true;
209
+ }
210
+
189
211
  function stopMcpProxy(projectId, agentId) {
190
212
  const key = `${projectId}/${agentId}`;
191
213
  const proxy = mcpProxies.get(key);
@@ -267,7 +289,7 @@ function writeMcpConfigFile(projectId, agentId, mcpHttpPort, token) {
267
289
  async function buildAgentArgs(projectId, agentId) {
268
290
  const cfg = readConfig();
269
291
  const project = cfg.projects?.find((p) => p.id === projectId);
270
- if (!project) return { args: [], acRegistrationName: null, acServerPort: null };
292
+ if (!project) return { args: [], acRegistrationName: null, acServerPort: null, acRegistrationToken: null, acInjectMode: null, acMcpHttpPort: null };
271
293
 
272
294
  const agentCfg = project.agents?.[agentId] || {};
273
295
  const command = agentCfg.command || "claude";
@@ -275,6 +297,8 @@ async function buildAgentArgs(projectId, agentId) {
275
297
  const args = [];
276
298
  let acRegistrationName = null;
277
299
  let acServerPort = null;
300
+ let acRegistrationToken = null;
301
+ let acInjectMode = null;
278
302
 
279
303
  // Permission bypass flags
280
304
  if (agentCfg.auto_approve !== false) {
@@ -287,6 +311,7 @@ async function buildAgentArgs(projectId, agentId) {
287
311
  const token = project.agentchattr_token;
288
312
  if (mcpHttpPort) {
289
313
  const injectMode = agentCfg.mcp_inject || (cliBase === "codex" ? "proxy_flag" : cliBase === "gemini" ? "env" : "flag");
314
+ acInjectMode = injectMode;
290
315
  if (injectMode === "flag") {
291
316
  // Claude/Kimi: register with AgentChattr to obtain a per-agent
292
317
  // token (#239 — session_token is browser auth, not MCP auth) and
@@ -311,6 +336,7 @@ async function buildAgentArgs(projectId, agentId) {
311
336
  throw new Error(`Failed to register ${agentId}: ${registerAgent.lastError}`);
312
337
  }
313
338
  acRegistrationName = registration.name;
339
+ acRegistrationToken = registration.token;
314
340
  writePersistedAgentToken(projectId, agentId, registration.token);
315
341
  const mcpConfigPath = writeMcpConfigFile(projectId, agentId, mcpHttpPort, registration.token);
316
342
  const flag = agentCfg.mcp_flag || "--mcp-config";
@@ -335,6 +361,7 @@ async function buildAgentArgs(projectId, agentId) {
335
361
  throw new Error(`Failed to register ${agentId}: ${registerAgent.lastError}`);
336
362
  }
337
363
  acRegistrationName = registration.name;
364
+ acRegistrationToken = registration.token;
338
365
  writePersistedAgentToken(projectId, agentId, registration.token);
339
366
  const upstreamUrl = `http://127.0.0.1:${mcpHttpPort}`;
340
367
  const proxyUrl = await startMcpProxy(projectId, agentId, upstreamUrl, registration.token);
@@ -344,7 +371,7 @@ async function buildAgentArgs(projectId, agentId) {
344
371
  }
345
372
  }
346
373
 
347
- return { args, acRegistrationName, acServerPort };
374
+ return { args, acRegistrationName, acServerPort, acRegistrationToken, acInjectMode, acMcpHttpPort: mcpHttpPort || null };
348
375
  }
349
376
 
350
377
  /**
@@ -383,6 +410,73 @@ function buildAgentEnv(projectId, agentId) {
383
410
  return env;
384
411
  }
385
412
 
413
+ /**
414
+ * #394 / quadwork#253: recover from a heartbeat 409 (AgentChattr was
415
+ * restarted, in-memory registry wiped, our token is now stale). Mirrors
416
+ * wrapper.py:732-741. Re-registers the running agent, swaps the
417
+ * tracked name/token on the live session so the heartbeat interval
418
+ * picks up the new credentials on its next tick, refreshes whichever
419
+ * MCP transport this agent uses (Claude config file vs Codex proxy),
420
+ * and restarts the queue watcher in case the assigned name changed
421
+ * (multi-instance slot bump).
422
+ *
423
+ * Best-effort: any failure here just means the next 5s heartbeat will
424
+ * fail again and we'll re-enter recovery — no tight retry loop because
425
+ * startHeartbeat guards re-entry with `recovering`.
426
+ */
427
+ async function recoverFrom409(projectId, agentId, session) {
428
+ if (!session.acServerPort) return;
429
+ const cfg = readConfig();
430
+ const project = cfg.projects?.find((p) => p.id === projectId);
431
+ const agentCfg = project?.agents?.[agentId] || {};
432
+ // AC may need a moment to come back up after a restart — wait briefly.
433
+ await waitForAgentChattrReady(session.acServerPort, 10000);
434
+
435
+ // Best-effort cleanup of the stale registration on disk so the
436
+ // fresh register isn't shoved into a slot 2 by leftover state.
437
+ const stale = readPersistedAgentToken(projectId, agentId);
438
+ if (stale) {
439
+ await deregisterAgent(session.acServerPort, agentId, stale).catch(() => {});
440
+ clearPersistedAgentToken(projectId, agentId);
441
+ }
442
+
443
+ const replacement = await registerAgent(session.acServerPort, agentId, agentCfg.display_name || null);
444
+ if (!replacement) return;
445
+
446
+ const previousName = session.acRegistrationName;
447
+ session.acRegistrationName = replacement.name;
448
+ session.acRegistrationToken = replacement.token;
449
+ writePersistedAgentToken(projectId, agentId, replacement.token);
450
+
451
+ // Refresh whichever MCP transport this agent uses so subsequent
452
+ // tool calls (and the queue-watcher's `mcp read` injections) hit
453
+ // AC with the new bearer token instead of the now-rejected one.
454
+ if (session.acInjectMode === "flag" && session.acMcpHttpPort) {
455
+ try { writeMcpConfigFile(projectId, agentId, session.acMcpHttpPort, replacement.token); } catch {}
456
+ } else if (session.acInjectMode === "proxy_flag") {
457
+ // Codex is pinned to the original ephemeral proxy URL, so we
458
+ // can't tear the listener down — mutate the token in place.
459
+ try { updateMcpProxyToken(projectId, agentId, replacement.token); } catch {}
460
+ }
461
+
462
+ // If the assigned name changed (e.g. multi-instance slot collision)
463
+ // the queue watcher is now polling the wrong file. Restart it
464
+ // against the new name so chat reaches the right agent.
465
+ if (replacement.name !== previousName && session.term) {
466
+ if (session.queueWatcherHandle) {
467
+ stopQueueWatcher(session.queueWatcherHandle);
468
+ session.queueWatcherHandle = null;
469
+ }
470
+ try {
471
+ const { dir: acDir } = resolveProjectChattr(projectId);
472
+ if (acDir) {
473
+ const dataDir = path.join(acDir, "data");
474
+ session.queueWatcherHandle = startQueueWatcher(dataDir, replacement.name, session.term);
475
+ }
476
+ } catch {}
477
+ }
478
+ }
479
+
386
480
  // Helper: spawn a PTY for a project/agent and register in agentSessions
387
481
  async function spawnAgentPty(project, agent) {
388
482
  const key = `${project}/${agent}`;
@@ -413,9 +507,53 @@ async function spawnAgentPty(project, agent) {
413
507
  error: null,
414
508
  acRegistrationName: built.acRegistrationName,
415
509
  acServerPort: built.acServerPort,
510
+ acRegistrationToken: built.acRegistrationToken,
511
+ acInjectMode: built.acInjectMode,
512
+ acMcpHttpPort: built.acMcpHttpPort,
513
+ acHeartbeatHandle: null,
514
+ queueWatcherHandle: null,
416
515
  };
417
516
  agentSessions.set(key, session);
418
517
 
518
+ // #391 / quadwork#250: keep this agent alive in AgentChattr by
519
+ // POSTing /api/heartbeat/{name} every 5s. Without it, AC's 60s
520
+ // crash-detection window deregisters the agent and chat messages
521
+ // never reach it. Mirrors wrapper.py:_heartbeat (lines 715-748).
522
+ if (session.acRegistrationName && session.acServerPort && session.acRegistrationToken) {
523
+ // #394 / quadwork#253: pass getters (not raw values) so the 409
524
+ // recovery path below can swap acRegistrationName/Token in place
525
+ // and the very next heartbeat tick uses the replacement
526
+ // credentials without us having to tear down + restart the
527
+ // interval.
528
+ session.acHeartbeatHandle = startHeartbeat(
529
+ session.acServerPort,
530
+ () => session.acRegistrationName,
531
+ () => session.acRegistrationToken,
532
+ { onConflict: () => recoverFrom409(project, agent, session) },
533
+ );
534
+ }
535
+
536
+ // #393 / quadwork#251: queue watcher — the actual mechanism by
537
+ // which agents pick up chat. Without this an agent can be
538
+ // registered + heartbeating yet still never respond, because
539
+ // AgentChattr only writes to {data_dir}/{name}_queue.jsonl and
540
+ // expects the agent side to poll + inject `mcp read`.
541
+ if (session.acRegistrationName && session.term) {
542
+ try {
543
+ const { dir: acDir } = resolveProjectChattr(project);
544
+ if (acDir) {
545
+ const dataDir = path.join(acDir, "data");
546
+ session.queueWatcherHandle = startQueueWatcher(
547
+ dataDir,
548
+ session.acRegistrationName,
549
+ session.term,
550
+ );
551
+ }
552
+ } catch {
553
+ // best-effort — failure here just means no chat injection
554
+ }
555
+ }
556
+
419
557
  term.onExit(({ exitCode }) => {
420
558
  const current = agentSessions.get(key);
421
559
  if (current && current.term === term) {
@@ -427,6 +565,27 @@ async function spawnAgentPty(project, agent) {
427
565
  current.ws.close(1000, `exited:${exitCode}`);
428
566
  }
429
567
  current.ws = null;
568
+ // #391 / quadwork#250: a crashed PTY must also clear its
569
+ // heartbeat interval (otherwise it leaks and a later /start
570
+ // double-registers) and free the AgentChattr slot (otherwise
571
+ // the agent stays falsely `active` forever and the next
572
+ // register lands at slot 2). Deregister is best-effort.
573
+ if (current.acHeartbeatHandle) {
574
+ stopHeartbeat(current.acHeartbeatHandle);
575
+ current.acHeartbeatHandle = null;
576
+ }
577
+ if (current.queueWatcherHandle) {
578
+ stopQueueWatcher(current.queueWatcherHandle);
579
+ current.queueWatcherHandle = null;
580
+ }
581
+ if (current.acRegistrationName && current.acServerPort) {
582
+ deregisterAgent(current.acServerPort, current.acRegistrationName).catch(() => {});
583
+ if (current.projectId && current.agentId) {
584
+ try { clearPersistedAgentToken(current.projectId, current.agentId); } catch {}
585
+ }
586
+ current.acRegistrationName = null;
587
+ current.acRegistrationToken = null;
588
+ }
430
589
  }
431
590
  });
432
591
 
@@ -457,6 +616,18 @@ async function stopAgentSession(key) {
457
616
  session.ws = null;
458
617
  session.state = "stopped";
459
618
  session.error = null;
619
+ // Stop heartbeat before deregister so we don't race a final POST
620
+ // against AgentChattr removing the name (#391 / quadwork#250).
621
+ if (session.acHeartbeatHandle) {
622
+ stopHeartbeat(session.acHeartbeatHandle);
623
+ session.acHeartbeatHandle = null;
624
+ }
625
+ // Stop queue watcher (#393 / quadwork#251) — the PTY is gone,
626
+ // injecting into a dead term would throw on the next tick.
627
+ if (session.queueWatcherHandle) {
628
+ stopQueueWatcher(session.queueWatcherHandle);
629
+ session.queueWatcherHandle = null;
630
+ }
460
631
  // Best-effort deregister from AgentChattr (#241) so the slot frees
461
632
  // and the next register lands at slot 1 instead of head-2.
462
633
  if (session.acRegistrationName && session.acServerPort) {
@@ -469,6 +640,7 @@ async function stopAgentSession(key) {
469
640
  clearPersistedAgentToken(session.projectId, session.agentId);
470
641
  }
471
642
  session.acRegistrationName = null;
643
+ session.acRegistrationToken = null;
472
644
  }
473
645
  // Clean up MCP auth proxy if running
474
646
  const [projectId, agentId] = key.split("/");
@@ -0,0 +1,125 @@
1
+ /**
2
+ * Per-agent queue watcher (#393 / quadwork#251).
3
+ *
4
+ * AgentChattr does NOT push chat to agents. When the operator types
5
+ * `@head` in chat, AC writes a job line to `{data_dir}/{name}_queue.jsonl`
6
+ * and walks away. Something on the agent side has to poll that file and
7
+ * inject an `mcp read` prompt into the running CLI's PTY so the agent
8
+ * picks up the chat. Without that injection the agent never responds,
9
+ * even when registration and heartbeats work.
10
+ *
11
+ * Reference: /Users/cho/Projects/agentchattr/wrapper.py lines 438-541
12
+ * (`_queue_watcher`). Polling (not fs.watch) is intentional: matches
13
+ * wrapper.py's behavior and avoids the cross-platform fs.watch
14
+ * footguns. The role/rules/identity-hint additions from wrapper.py
15
+ * lines 501-528 are intentionally out of scope for v1 per the issue.
16
+ */
17
+
18
+ const fs = require("fs");
19
+ const path = require("path");
20
+
21
+ const POLL_INTERVAL_MS = 1000;
22
+
23
+ /**
24
+ * Start polling `{dataDir}/{agentName}_queue.jsonl`. When non-empty,
25
+ * read all lines, truncate the file (atomic-ish claim — same race the
26
+ * Python wrapper accepts), parse each JSON line, build a single
27
+ * injected prompt, and write it into the supplied PTY terminal.
28
+ *
29
+ * Returns an opaque interval handle. Pass it to stopQueueWatcher to
30
+ * cancel; safe to call with null.
31
+ */
32
+ function startQueueWatcher(dataDir, agentName, ptyTerm) {
33
+ if (!dataDir || !agentName || !ptyTerm) return null;
34
+ const queueFile = path.join(dataDir, `${agentName}_queue.jsonl`);
35
+
36
+ const tick = () => {
37
+ try {
38
+ if (!fs.existsSync(queueFile)) return;
39
+ const stat = fs.statSync(queueFile);
40
+ if (stat.size === 0) return;
41
+
42
+ const content = fs.readFileSync(queueFile, "utf-8");
43
+ // Atomic claim: truncate immediately so the next AC write lands
44
+ // in an empty file and we don't double-process the same job on
45
+ // the next tick. There's a small race if AC writes between the
46
+ // read and the truncate; wrapper.py accepts the same race.
47
+ fs.writeFileSync(queueFile, "");
48
+
49
+ const lines = content.split("\n").map((l) => l.trim()).filter(Boolean);
50
+ if (lines.length === 0) return;
51
+
52
+ let channel = "general";
53
+ let customPrompt = "";
54
+ let jobId = null;
55
+ let hasTrigger = false;
56
+ for (const line of lines) {
57
+ let data;
58
+ try {
59
+ data = JSON.parse(line);
60
+ } catch {
61
+ continue;
62
+ }
63
+ hasTrigger = true;
64
+ if (data && typeof data === "object") {
65
+ if (typeof data.channel === "string") channel = data.channel;
66
+ // AgentChattr serializes job_id as an integer (agents.py
67
+ // defines `job_id: int | None`), so accept both numbers and
68
+ // strings here. Without this, job-thread triggers fall back
69
+ // to the channel prompt and the agent reads the wrong
70
+ // conversation. Cast to string for the prompt template.
71
+ if (typeof data.job_id === "number" || typeof data.job_id === "string") {
72
+ jobId = String(data.job_id);
73
+ }
74
+ if (typeof data.prompt === "string" && data.prompt.trim()) {
75
+ customPrompt = data.prompt.trim();
76
+ }
77
+ }
78
+ }
79
+ if (!hasTrigger) return;
80
+
81
+ let prompt;
82
+ if (customPrompt) {
83
+ prompt = customPrompt;
84
+ } else if (jobId) {
85
+ prompt = `mcp read job_id=${jobId} - you were mentioned in a job thread, take appropriate action`;
86
+ } else {
87
+ prompt = `mcp read #${channel} - you were mentioned, take appropriate action`;
88
+ }
89
+
90
+ // Flatten newlines: multi-line writes trigger paste detection in
91
+ // Claude Code (shows "[Pasted text +N]") and can break injection
92
+ // of long prompts. Mirrors wrapper.py:532.
93
+ const flat = prompt.replace(/\n/g, " ");
94
+ // Inject text and Enter as SEPARATE writes with a delay between.
95
+ // Codex's TUI does not submit when text + "\r" arrive in one chunk —
96
+ // it needs the text to render, then a separate Enter keystroke.
97
+ // Claude Code accepts either form. Mirrors wrapper_unix.py inject():
98
+ // tmux send-keys -l <text> ; sleep ; tmux send-keys Enter.
99
+ // Delay scales with prompt length so longer prompts get more time
100
+ // to render before submit.
101
+ ptyTerm.write(flat);
102
+ const submitDelayMs = Math.max(300, flat.length);
103
+ setTimeout(() => {
104
+ try { ptyTerm.write("\r"); } catch { /* swallow */ }
105
+ }, submitDelayMs);
106
+ } catch {
107
+ // Swallow — next tick will retry. Logging here would spam the
108
+ // server output once per second on a permission error.
109
+ }
110
+ };
111
+
112
+ return setInterval(tick, POLL_INTERVAL_MS);
113
+ }
114
+
115
+ /**
116
+ * Stop a watcher started by startQueueWatcher. Safe to call with null.
117
+ */
118
+ function stopQueueWatcher(handle) {
119
+ if (handle) clearInterval(handle);
120
+ }
121
+
122
+ module.exports = {
123
+ startQueueWatcher,
124
+ stopQueueWatcher,
125
+ };
package/server/routes.js CHANGED
@@ -922,6 +922,44 @@ router.post("/api/setup", (req, res) => {
922
922
  // ~/.quadwork/{id}/OVERNIGHT-QUEUE.md.
923
923
  writeOvernightQueueFileSafe(id, name || id, repo);
924
924
 
925
+ // Batch 28 / #392 / quadwork#252: auto-spawn the per-project
926
+ // AgentChattr process. The CLI wizard's writeAgentChattrConfig
927
+ // does this; the web wizard previously left the install dormant
928
+ // until the user clicked Restart, so MCP fell through to a stale
929
+ // instance on port 8300. Mirror the loopback-restart pattern
930
+ // already used by the agentchattr-config branch above. Failures
931
+ // are non-fatal — the dashboard's Restart button is still
932
+ // available, and per the issue add-config must still return ok.
933
+ try {
934
+ const qwPort = cfg.port || 8400;
935
+ fetch(
936
+ `http://127.0.0.1:${qwPort}/api/agentchattr/${encodeURIComponent(id)}/restart`,
937
+ { method: "POST" },
938
+ )
939
+ .then(async (r) => {
940
+ // /restart reports spawn failures (e.g. port collision —
941
+ // server/index.js:650-668) as HTTP 500, so a resolved
942
+ // fetch is not the same thing as a successful spawn. Log
943
+ // non-2xx responses with status and body so the operator
944
+ // can see why the auto-spawn silently didn't take.
945
+ if (!r.ok) {
946
+ let detail = "";
947
+ try { detail = (await r.text()).slice(0, 500); } catch {}
948
+ console.warn(
949
+ `[setup] auto-spawn AgentChattr for ${id} returned HTTP ${r.status}: ${detail}`,
950
+ );
951
+ }
952
+ })
953
+ .catch((err) => {
954
+ console.warn(
955
+ `[setup] auto-spawn AgentChattr for ${id} failed:`,
956
+ err.message || err,
957
+ );
958
+ });
959
+ } catch (err) {
960
+ console.warn(`[setup] auto-spawn AgentChattr for ${id} skipped:`, err.message || err);
961
+ }
962
+
925
963
  return res.json({ ok: true });
926
964
  }
927
965
  default: