quadwork 1.2.5 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (98) hide show
  1. package/README.md +189 -82
  2. package/bin/quadwork.js +102 -0
  3. package/out/404.html +1 -1
  4. package/out/__next.__PAGE__.txt +3 -3
  5. package/out/__next._full.txt +12 -12
  6. package/out/__next._head.txt +4 -4
  7. package/out/__next._index.txt +6 -6
  8. package/out/__next._tree.txt +2 -2
  9. package/out/_next/static/chunks/006g3lco-9xqf.js +1 -0
  10. package/out/_next/static/chunks/035rt-n0oid7d.js +1 -0
  11. package/out/_next/static/chunks/{0e~ue9ca5zrep.js → 05ok82hwk0x-c.js} +1 -1
  12. package/out/_next/static/chunks/0u~7e4fgf-u06.css +2 -0
  13. package/out/_next/static/chunks/0zqyw6q.jp~1i.js +26 -0
  14. package/out/_next/static/chunks/17y2walb2um9w.js +1 -0
  15. package/out/_next/static/chunks/{16ell.n1p8o7d.js → 18cmux34jwe.p.js} +1 -1
  16. package/out/_not-found/__next._full.txt +11 -11
  17. package/out/_not-found/__next._head.txt +4 -4
  18. package/out/_not-found/__next._index.txt +6 -6
  19. package/out/_not-found/__next._not-found.__PAGE__.txt +2 -2
  20. package/out/_not-found/__next._not-found.txt +3 -3
  21. package/out/_not-found/__next._tree.txt +2 -2
  22. package/out/_not-found.html +1 -1
  23. package/out/_not-found.txt +11 -11
  24. package/out/app-shell/__next._full.txt +11 -11
  25. package/out/app-shell/__next._head.txt +4 -4
  26. package/out/app-shell/__next._index.txt +6 -6
  27. package/out/app-shell/__next._tree.txt +2 -2
  28. package/out/app-shell/__next.app-shell.__PAGE__.txt +2 -2
  29. package/out/app-shell/__next.app-shell.txt +3 -3
  30. package/out/app-shell.html +1 -1
  31. package/out/app-shell.txt +11 -11
  32. package/out/index.html +1 -1
  33. package/out/index.txt +12 -12
  34. package/out/project/_/__next._full.txt +12 -12
  35. package/out/project/_/__next._head.txt +4 -4
  36. package/out/project/_/__next._index.txt +6 -6
  37. package/out/project/_/__next._tree.txt +2 -2
  38. package/out/project/_/__next.project.$d$id.__PAGE__.txt +3 -3
  39. package/out/project/_/__next.project.$d$id.txt +3 -3
  40. package/out/project/_/__next.project.txt +3 -3
  41. package/out/project/_/memory/__next._full.txt +12 -12
  42. package/out/project/_/memory/__next._head.txt +4 -4
  43. package/out/project/_/memory/__next._index.txt +6 -6
  44. package/out/project/_/memory/__next._tree.txt +2 -2
  45. package/out/project/_/memory/__next.project.$d$id.memory.__PAGE__.txt +3 -3
  46. package/out/project/_/memory/__next.project.$d$id.memory.txt +3 -3
  47. package/out/project/_/memory/__next.project.$d$id.txt +3 -3
  48. package/out/project/_/memory/__next.project.txt +3 -3
  49. package/out/project/_/memory.html +1 -1
  50. package/out/project/_/memory.txt +12 -12
  51. package/out/project/_/queue/__next._full.txt +12 -12
  52. package/out/project/_/queue/__next._head.txt +4 -4
  53. package/out/project/_/queue/__next._index.txt +6 -6
  54. package/out/project/_/queue/__next._tree.txt +2 -2
  55. package/out/project/_/queue/__next.project.$d$id.queue.__PAGE__.txt +3 -3
  56. package/out/project/_/queue/__next.project.$d$id.queue.txt +3 -3
  57. package/out/project/_/queue/__next.project.$d$id.txt +3 -3
  58. package/out/project/_/queue/__next.project.txt +3 -3
  59. package/out/project/_/queue.html +1 -1
  60. package/out/project/_/queue.txt +12 -12
  61. package/out/project/_.html +1 -1
  62. package/out/project/_.txt +12 -12
  63. package/out/settings/__next._full.txt +12 -12
  64. package/out/settings/__next._head.txt +4 -4
  65. package/out/settings/__next._index.txt +6 -6
  66. package/out/settings/__next._tree.txt +2 -2
  67. package/out/settings/__next.settings.__PAGE__.txt +3 -3
  68. package/out/settings/__next.settings.txt +3 -3
  69. package/out/settings.html +1 -1
  70. package/out/settings.txt +12 -12
  71. package/out/setup/__next._full.txt +12 -12
  72. package/out/setup/__next._head.txt +4 -4
  73. package/out/setup/__next._index.txt +6 -6
  74. package/out/setup/__next._tree.txt +2 -2
  75. package/out/setup/__next.setup.__PAGE__.txt +3 -3
  76. package/out/setup/__next.setup.txt +3 -3
  77. package/out/setup.html +1 -1
  78. package/out/setup.txt +12 -12
  79. package/out/sounds/alert-tone.mp3 +0 -0
  80. package/out/sounds/click.mp3 +0 -0
  81. package/out/sounds/pluck.mp3 +0 -0
  82. package/out/sounds/soft-chime.mp3 +0 -0
  83. package/out/sounds/warm-bell.mp3 +0 -0
  84. package/package.json +5 -2
  85. package/server/config.js +43 -1
  86. package/server/index.js +268 -21
  87. package/server/routes.js +1103 -7
  88. package/templates/CLAUDE.md +1 -0
  89. package/templates/OVERNIGHT-QUEUE.md +1 -1
  90. package/templates/seeds/head.AGENTS.md +21 -2
  91. package/out/_next/static/chunks/0-yus965h3bk_.js +0 -24
  92. package/out/_next/static/chunks/0caq73v0knw_w.js +0 -1
  93. package/out/_next/static/chunks/0d.f~y5jeh785.css +0 -2
  94. package/out/_next/static/chunks/0md7hgvwnovzq.js +0 -1
  95. package/out/_next/static/chunks/0spbjcw4anq15.js +0 -1
  96. /package/out/_next/static/{Cpy01wZHv0vXd_j_HlrSf → 6uvV3nUfwr_t_JKrZJSP8}/_buildManifest.js +0 -0
  97. /package/out/_next/static/{Cpy01wZHv0vXd_j_HlrSf → 6uvV3nUfwr_t_JKrZJSP8}/_clientMiddlewareManifest.js +0 -0
  98. /package/out/_next/static/{Cpy01wZHv0vXd_j_HlrSf → 6uvV3nUfwr_t_JKrZJSP8}/_ssgManifest.js +0 -0
package/server/index.js CHANGED
@@ -16,7 +16,13 @@ const config = readConfig();
16
16
  const PORT = config.port || 8400;
17
17
 
18
18
  const app = express();
19
- app.use(express.json());
19
+ // #412 / quadwork#279: bump the global JSON body limit to 10mb so
20
+ // POST /api/project-history can accept full chat exports. The
21
+ // default ~100kb 413'd long before the route-local parser had a
22
+ // chance to apply its own 10mb cap (the global parser runs first).
23
+ // All other routes are well within 10mb in practice; this is the
24
+ // least invasive fix and matches the documented import ceiling.
25
+ app.use(express.json({ limit: "10mb" }));
20
26
 
21
27
  // --- Mount migrated API routes (from Next.js) ---
22
28
  app.use(routes);
@@ -658,6 +664,55 @@ app.get("/api/agents", (_req, res) => {
658
664
  res.json(agents);
659
665
  });
660
666
 
667
+ // #424 / quadwork#304: best-effort auto-snapshot of chat history
668
+ // before any AgentChattr restart. Defense-in-depth against
669
+ // destructive ops like /clear that rewrite AC's JSONL log in place
670
+ // — per #303 the log itself IS persistent across normal restarts,
671
+ // so the snapshot's job is to give the operator a point-in-time
672
+ // rollback if the log gets clobbered, not to prevent history loss
673
+ // on ordinary lifecycle events.
674
+ //
675
+ // Snapshot contents = the same envelope GET /api/project-history
676
+ // returns, so an operator (or a future "restore" button) can feed
677
+ // the file straight into POST /api/project-history for replay.
678
+ const HISTORY_SNAPSHOT_LIMIT = 5;
679
+
680
+ async function snapshotProjectHistory(projectId) {
681
+ try {
682
+ const snapDir = path.join(require("os").homedir(), ".quadwork", projectId, "history-snapshots");
683
+ if (!fs.existsSync(snapDir)) fs.mkdirSync(snapDir, { recursive: true });
684
+ const res = await fetch(`http://127.0.0.1:${PORT}/api/project-history?project=${encodeURIComponent(projectId)}`, {
685
+ signal: AbortSignal.timeout(30000),
686
+ });
687
+ if (!res.ok) {
688
+ console.warn(`[snapshot] ${projectId} history fetch returned ${res.status}; skipping snapshot`);
689
+ return false;
690
+ }
691
+ const text = await res.text();
692
+ const stamp = new Date().toISOString().replace(/[:.]/g, "-");
693
+ const outPath = path.join(snapDir, `${stamp}.json`);
694
+ fs.writeFileSync(outPath, text);
695
+ console.log(`[snapshot] ${projectId} → ${outPath}`);
696
+ // Prune to the newest HISTORY_SNAPSHOT_LIMIT files so the
697
+ // directory can't grow unbounded across weeks of restarts.
698
+ try {
699
+ const entries = fs.readdirSync(snapDir)
700
+ .filter((f) => f.endsWith(".json"))
701
+ .map((f) => ({ f, t: fs.statSync(path.join(snapDir, f)).mtimeMs }))
702
+ .sort((a, b) => b.t - a.t);
703
+ for (const old of entries.slice(HISTORY_SNAPSHOT_LIMIT)) {
704
+ try { fs.unlinkSync(path.join(snapDir, old.f)); } catch {}
705
+ }
706
+ } catch {
707
+ // non-fatal — stale snapshots just linger
708
+ }
709
+ return true;
710
+ } catch (err) {
711
+ console.warn(`[snapshot] ${projectId} snapshot failed: ${err.message || err}`);
712
+ return false;
713
+ }
714
+ }
715
+
661
716
  // Per-project AgentChattr lifecycle: /api/agentchattr/:project/:action
662
717
  // Backward compat: /api/agentchattr/:action uses first project
663
718
  async function handleAgentChattr(req, res) {
@@ -779,6 +834,18 @@ async function handleAgentChattr(req, res) {
779
834
  setProc({ process: null, state: "stopped", error: null });
780
835
  res.json({ ok: true, state: "stopped" });
781
836
  } else if (action === "restart") {
837
+ // #424 / quadwork#304: snapshot history before killing the
838
+ // process. Best-effort and non-blocking-on-failure so a flaky
839
+ // snapshot doesn't leave the operator unable to restart AC.
840
+ await snapshotProjectHistory(projectId).catch(() => {});
841
+ // #424 / quadwork#304 Phase 3: latch the opt-in BEFORE the
842
+ // spawn so a restart that itself clears the flag can't starve
843
+ // the auto-restore. We capture the snapshot filename we just
844
+ // wrote + the project's auto_restore_after_restart flag and
845
+ // replay it in the post-spawn tick below if both are set.
846
+ const preRestartCfg = readConfig();
847
+ const preRestartProject = preRestartCfg.projects?.find((p) => p.id === projectId);
848
+ const shouldAutoRestore = !!(preRestartProject && preRestartProject.auto_restore_after_restart);
782
849
  const proc = getProc();
783
850
  if (proc.process) {
784
851
  try { proc.process.kill("SIGTERM"); } catch {}
@@ -793,6 +860,30 @@ async function handleAgentChattr(req, res) {
793
860
  }
794
861
  // Sync token after AgentChattr restarts
795
862
  setTimeout(() => syncChattrToken(projectId), 2000);
863
+ // #424 / quadwork#304 Phase 3: optional auto-restore.
864
+ // Fire the restore 3s after spawn so AC's ws is ready.
865
+ // Best-effort: never blocks the restart response or
866
+ // rolls back on error.
867
+ if (shouldAutoRestore) {
868
+ setTimeout(async () => {
869
+ try {
870
+ const snapDir = path.join(require("os").homedir(), ".quadwork", projectId, "history-snapshots");
871
+ if (!fs.existsSync(snapDir)) return;
872
+ const newest = fs.readdirSync(snapDir)
873
+ .filter((f) => f.endsWith(".json"))
874
+ .map((f) => ({ f, t: fs.statSync(path.join(snapDir, f)).mtimeMs }))
875
+ .sort((a, b) => b.t - a.t)[0];
876
+ if (!newest) return;
877
+ const r = await fetch(`http://127.0.0.1:${PORT}/api/project-history/restore?project=${encodeURIComponent(projectId)}&name=${encodeURIComponent(newest.f)}`, {
878
+ method: "POST",
879
+ });
880
+ if (r.ok) console.log(`[snapshot] ${projectId} auto-restored ${newest.f}`);
881
+ else console.warn(`[snapshot] ${projectId} auto-restore returned ${r.status}`);
882
+ } catch (err) {
883
+ console.warn(`[snapshot] ${projectId} auto-restore failed: ${err.message || err}`);
884
+ }
885
+ }, 3000);
886
+ }
796
887
  res.json({ ok: true, state: "running", pid: child.pid });
797
888
  } catch (err) {
798
889
  setProc({ process: null, state: "error", error: err.message });
@@ -808,7 +899,16 @@ async function handleAgentChattr(req, res) {
808
899
  try {
809
900
  const { execSync } = require("child_process");
810
901
 
811
- // Stop running process before pulling
902
+ // Stop running process before pulling. Snapshot first so a
903
+ // botched git pull can still be rolled back from disk.
904
+ // #424 / quadwork#304: best-effort.
905
+ await snapshotProjectHistory(projectId).catch(() => {});
906
+ // Latch the auto-restore opt-in BEFORE stop, same as the
907
+ // explicit restart branch above — a config mutation during
908
+ // the git pull shouldn't starve the replay.
909
+ const updateCfgPre = readConfig();
910
+ const updateProjectPre = updateCfgPre.projects?.find((p) => p.id === projectId);
911
+ const updateShouldAutoRestore = !!(updateProjectPre && updateProjectPre.auto_restore_after_restart);
812
912
  const proc = getProc();
813
913
  const wasRunning = proc.process && proc.state === "running";
814
914
  if (wasRunning) {
@@ -833,6 +933,30 @@ async function handleAgentChattr(req, res) {
833
933
  restarted = !!child;
834
934
  if (child) {
835
935
  setTimeout(() => syncChattrToken(projectId).catch(() => {}), 2000);
936
+ // #424 / quadwork#304 Phase 3: auto-restore after an
937
+ // update-triggered restart too (t2a re-review). Same
938
+ //3s wait + newest-snapshot-by-mtime path as the explicit
939
+ // restart branch, using the pre-stop latched opt-in.
940
+ if (updateShouldAutoRestore) {
941
+ setTimeout(async () => {
942
+ try {
943
+ const snapDir = path.join(require("os").homedir(), ".quadwork", projectId, "history-snapshots");
944
+ if (!fs.existsSync(snapDir)) return;
945
+ const newest = fs.readdirSync(snapDir)
946
+ .filter((f) => f.endsWith(".json"))
947
+ .map((f) => ({ f, t: fs.statSync(path.join(snapDir, f)).mtimeMs }))
948
+ .sort((a, b) => b.t - a.t)[0];
949
+ if (!newest) return;
950
+ const r = await fetch(`http://127.0.0.1:${PORT}/api/project-history/restore?project=${encodeURIComponent(projectId)}&name=${encodeURIComponent(newest.f)}`, {
951
+ method: "POST",
952
+ });
953
+ if (r.ok) console.log(`[snapshot] ${projectId} auto-restored ${newest.f} after update`);
954
+ else console.warn(`[snapshot] ${projectId} post-update auto-restore returned ${r.status}`);
955
+ } catch (err) {
956
+ console.warn(`[snapshot] ${projectId} post-update auto-restore failed: ${err.message || err}`);
957
+ }
958
+ }, 3000);
959
+ }
836
960
  }
837
961
  }
838
962
 
@@ -983,19 +1107,24 @@ ALL: Communicate via this chat by tagging agents. Your terminal is NOT visible.`
983
1107
  async function sendTriggerMessage(projectId) {
984
1108
  const cfg = readConfig();
985
1109
  const project = cfg.projects && cfg.projects.find((p) => p.id === projectId);
986
- const { url: chattrUrl, token: chattrToken } = resolveProjectChattr(projectId);
987
- const token = chattrToken || "";
988
1110
  const message = (project && project.trigger_message) || DEFAULT_MESSAGE;
989
- const headers = { "Content-Type": "application/json" };
990
- if (token) headers["x-session-token"] = token;
1111
+
1112
+ // #401 / quadwork#277: route trigger sends through the local
1113
+ // /api/chat path that already works for the chat panel. The old
1114
+ // direct /api/send call required a registration token (not the
1115
+ // session token we have on hand) and 401'd silently — agents never
1116
+ // saw the queue-check pulse. /api/chat opens the AC ws with the
1117
+ // session token and inherits the #230 token-resync-on-401 retry,
1118
+ // so the trigger now gets the same proven path as the chat panel.
1119
+ const qwPort = cfg.port || 8400;
1120
+ const url = `http://127.0.0.1:${qwPort}/api/chat?project=${encodeURIComponent(projectId)}`;
991
1121
 
992
1122
  const info = triggers.get(projectId);
993
1123
  try {
994
- let tokenParam = token ? `?token=${encodeURIComponent(token)}` : "";
995
- const res = await fetch(`${chattrUrl}/api/send${tokenParam}`, {
1124
+ const res = await fetch(url, {
996
1125
  method: "POST",
997
- headers,
998
- body: JSON.stringify({ text: message, channel: "general", sender: "user" }),
1126
+ headers: { "Content-Type": "application/json" },
1127
+ body: JSON.stringify({ text: message, channel: "general" }),
999
1128
  });
1000
1129
  if (!res.ok) {
1001
1130
  const err = await res.text().catch(() => "");
@@ -1075,7 +1204,12 @@ function stopTrigger(project) {
1075
1204
 
1076
1205
  app.post("/api/triggers/:project/start", (req, res) => {
1077
1206
  const { project } = req.params;
1078
- const { interval, duration, message, sendImmediately } = req.body || {};
1207
+ // #418 / quadwork#306: sendImmediately was an always-true
1208
+ // "Send Message and Start Trigger" flag from #210; operators
1209
+ // asked for a pure scheduler ("Start Trigger" — wait for the
1210
+ // first interval). The field is ignored here; the send-now
1211
+ // endpoint below still exists for the explicit one-shot path.
1212
+ const { interval, duration, message } = req.body || {};
1079
1213
  const ms = (interval || 30) * 60 * 1000;
1080
1214
  const durationMs = duration ? duration * 60 * 1000 : 0; // duration in minutes, 0 = indefinite
1081
1215
 
@@ -1102,16 +1236,12 @@ app.post("/api/triggers/:project/start", (req, res) => {
1102
1236
  if (existing.durationTimer) clearTimeout(existing.durationTimer);
1103
1237
  }
1104
1238
 
1105
- // #210: the Scheduled Trigger widget's "Send Message and Start
1106
- // Trigger" button expects an immediate send, not the first fire
1107
- // one interval in the future. setInterval won't do that on its
1108
- // own, so trigger a one-shot send when sendImmediately is true.
1109
- if (sendImmediately) {
1110
- // Don't await keep the response fast. sendTriggerMessage logs
1111
- // its own errors and updates lastError on the trigger info.
1112
- sendTriggerMessage(project).catch(() => {});
1113
- }
1114
-
1239
+ // #418 / quadwork#306: no immediate fire — the first send happens
1240
+ // at T + interval via the setInterval below. Operators set the
1241
+ // trigger up in advance of going afk and don't want it interrupting
1242
+ // whatever agents are currently mid-task. The explicit "send now"
1243
+ // path still lives at /api/triggers/:project/send-now for the
1244
+ // rare case an operator actually wants to kick things off.
1115
1245
  const timer = setInterval(() => sendTriggerMessage(project), ms);
1116
1246
  const expiresAt = durationMs > 0 ? Date.now() + durationMs : null;
1117
1247
 
@@ -1379,6 +1509,123 @@ function syncTriggersFromConfig() {
1379
1509
  }
1380
1510
  }
1381
1511
 
1512
+ // #422 / quadwork#310: auto-continue after loop guard.
1513
+ //
1514
+ // Per opted-in project, poll AC's /api/status every 10s. When we see
1515
+ // a false → true transition on `paused`, wait the configured delay
1516
+ // (default 30s) and POST /continue to /api/chat — same path the
1517
+ // operator would use manually. The delay gives a human a chance to
1518
+ // intervene on an actually-runaway loop, and acts as a soft rate
1519
+ // limit against pathological loops that would otherwise just loop
1520
+ // forever under an auto-continue.
1521
+ //
1522
+ // Detection is deliberately polling rather than a long-lived ws:
1523
+ // a ws subscription per project would complicate lifecycle and
1524
+ // reconnection, and 10s polling latency is acceptable when the
1525
+ // delay is tens of seconds. Skipping projects without the opt-in
1526
+ // keeps the poller cheap for single-project setups.
1527
+
1528
+ const _loopGuardPausedState = new Map(); // projectId -> { paused: bool, scheduled: Timeout? }
1529
+ const LOOP_GUARD_POLL_INTERVAL_MS = 10000;
1530
+
1531
+ async function checkLoopGuardPause(project) {
1532
+ if (!project || !project.auto_continue_loop_guard) return;
1533
+ const { url: base, token: sessionToken } = resolveProjectChattr(project.id);
1534
+ if (!base) return;
1535
+ let paused = false;
1536
+ try {
1537
+ const r = await fetch(`${base}/api/status`, {
1538
+ headers: sessionToken ? { "x-session-token": sessionToken } : {},
1539
+ signal: AbortSignal.timeout(5000),
1540
+ });
1541
+ if (!r.ok) return;
1542
+ const data = await r.json();
1543
+ paused = !!(data && data.paused);
1544
+ } catch {
1545
+ return;
1546
+ }
1547
+ const state = _loopGuardPausedState.get(project.id) || { paused: false, scheduled: null };
1548
+ // Transition false → true: schedule an auto-continue after the delay.
1549
+ if (paused && !state.paused && !state.scheduled) {
1550
+ const delaySec = Number.isFinite(project.auto_continue_delay_sec) && project.auto_continue_delay_sec >= 5
1551
+ ? project.auto_continue_delay_sec
1552
+ : 30;
1553
+ console.log(`[loop-guard] ${project.id} paused — auto-continue in ${delaySec}s`);
1554
+ state.scheduled = setTimeout(async () => {
1555
+ try {
1556
+ // Re-check the opt-in at fire time so a checkbox disable
1557
+ // mid-wait actually stops the auto-continue.
1558
+ const freshCfg = readConfig();
1559
+ const fresh = freshCfg.projects?.find((p) => p.id === project.id);
1560
+ if (!fresh || !fresh.auto_continue_loop_guard) {
1561
+ console.log(`[loop-guard] ${project.id} auto-continue cancelled (opt-in disabled during wait)`);
1562
+ } else {
1563
+ // Re-check the router's pause state at fire time too. The
1564
+ // 10s status poller may not have seen a manual operator
1565
+ // /continue yet when the delay window (5–9s) is shorter
1566
+ // than the poll interval — without this, a manual resume
1567
+ // inside a 5s wait would be followed by a stale auto
1568
+ // /continue that clobbers hop_count on an already-running
1569
+ // chain (router.continue_routing resets the counter
1570
+ // unconditionally). The re-check closes the race.
1571
+ let stillPaused = false;
1572
+ try {
1573
+ const { url: freshBase, token: freshToken } = resolveProjectChattr(project.id);
1574
+ if (freshBase) {
1575
+ const sr = await fetch(`${freshBase}/api/status`, {
1576
+ headers: freshToken ? { "x-session-token": freshToken } : {},
1577
+ signal: AbortSignal.timeout(5000),
1578
+ });
1579
+ if (sr.ok) {
1580
+ const sd = await sr.json();
1581
+ stillPaused = !!(sd && sd.paused);
1582
+ }
1583
+ }
1584
+ } catch {
1585
+ // Status re-check failed — fall back to "don't fire".
1586
+ // Stuck pause will still be caught on the next 10s tick.
1587
+ }
1588
+ if (!stillPaused) {
1589
+ console.log(`[loop-guard] ${project.id} auto-continue cancelled (router already resumed)`);
1590
+ } else {
1591
+ const res = await fetch(`http://127.0.0.1:${PORT}/api/chat?project=${encodeURIComponent(project.id)}`, {
1592
+ method: "POST",
1593
+ headers: { "Content-Type": "application/json" },
1594
+ body: JSON.stringify({ text: "/continue", channel: "general" }),
1595
+ });
1596
+ if (res.ok) console.log(`[loop-guard] ${project.id} auto-continued`);
1597
+ else console.warn(`[loop-guard] ${project.id} auto-continue POST returned ${res.status}`);
1598
+ }
1599
+ }
1600
+ } catch (err) {
1601
+ console.warn(`[loop-guard] ${project.id} auto-continue failed: ${err.message || err}`);
1602
+ }
1603
+ const s2 = _loopGuardPausedState.get(project.id);
1604
+ if (s2) s2.scheduled = null;
1605
+ }, delaySec * 1000);
1606
+ }
1607
+ // Transition true → false: clear any pending timer.
1608
+ if (!paused && state.paused && state.scheduled) {
1609
+ clearTimeout(state.scheduled);
1610
+ state.scheduled = null;
1611
+ }
1612
+ state.paused = paused;
1613
+ _loopGuardPausedState.set(project.id, state);
1614
+ }
1615
+
1616
+ function runLoopGuardPollingTick() {
1617
+ try {
1618
+ const cfg = readConfig();
1619
+ for (const p of (cfg.projects || [])) {
1620
+ if (p && p.auto_continue_loop_guard) checkLoopGuardPause(p);
1621
+ }
1622
+ } catch {
1623
+ // config unreadable — next tick will retry
1624
+ }
1625
+ }
1626
+
1627
+ setInterval(runLoopGuardPollingTick, LOOP_GUARD_POLL_INTERVAL_MS);
1628
+
1382
1629
  // --- Start ---
1383
1630
 
1384
1631
  server.listen(PORT, "127.0.0.1", () => {