@adaptic/maestro 1.7.3 → 1.8.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/commands/init-maestro.md +15 -2
- package/.gitignore +7 -0
- package/README.md +62 -11
- package/bin/maestro.mjs +338 -2
- package/bin/maestro.test.mjs +299 -0
- package/docs/guides/poller-daemon-setup.md +21 -8
- package/docs/runbooks/perpetual-operations.md +19 -15
- package/docs/runbooks/recovery-and-failover.md +42 -0
- package/lib/cadence-bus.mjs +625 -0
- package/lib/cadence-bus.test.mjs +354 -0
- package/package.json +6 -1
- package/scaffold/CLAUDE.md +11 -7
- package/scripts/cadence/cadence-status.mjs +36 -0
- package/scripts/cadence/enqueue-cadence-tick.mjs +158 -0
- package/scripts/cadence/enqueue-cadence-tick.test.mjs +154 -0
- package/scripts/cadence/launchd-cadence-wrapper.sh +85 -0
- package/scripts/daemon/cadence-consumer.mjs +439 -0
- package/scripts/daemon/cadence-consumer.test.mjs +397 -0
- package/scripts/daemon/cadence-handlers.mjs +263 -0
- package/scripts/daemon/maestro-daemon.mjs +20 -0
- package/scripts/local-triggers/generate-plists.sh +62 -17
- package/scripts/local-triggers/generate-plists.test.mjs +254 -0
- package/scripts/local-triggers/plists/.gitkeep +0 -0
- package/scripts/local-triggers/run-trigger.sh +22 -3
- package/scripts/local-triggers/plists/ai.adaptic.sophie-backlog-executor.plist +0 -21
- package/scripts/local-triggers/plists/ai.adaptic.sophie-daemon.plist +0 -32
- package/scripts/local-triggers/plists/ai.adaptic.sophie-inbox-processor.plist +0 -21
- package/scripts/local-triggers/plists/ai.adaptic.sophie-meeting-action-capture.plist +0 -21
- package/scripts/local-triggers/plists/ai.adaptic.sophie-meeting-prep.plist +0 -21
- package/scripts/local-triggers/plists/ai.adaptic.sophie-midday-sweep.plist +0 -26
- package/scripts/local-triggers/plists/ai.adaptic.sophie-quarterly-self-assessment.plist +0 -62
- package/scripts/local-triggers/plists/ai.adaptic.sophie-weekly-engineering-health.plist +0 -28
- package/scripts/local-triggers/plists/ai.adaptic.sophie-weekly-execution.plist +0 -28
- package/scripts/local-triggers/plists/ai.adaptic.sophie-weekly-hiring.plist +0 -28
- package/scripts/local-triggers/plists/ai.adaptic.sophie-weekly-priorities.plist +0 -28
- package/scripts/local-triggers/plists/ai.adaptic.sophie-weekly-strategic-memo.plist +0 -28
|
@@ -0,0 +1,397 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* cadence-consumer.test.mjs — node:test coverage for the cadence consumer.
|
|
3
|
+
*
|
|
4
|
+
* Uses startConsumer({ spawnSession }) to inject a stub sub-session spawner
|
|
5
|
+
* so no real Claude Code process is ever launched. Each test creates its own
|
|
6
|
+
* AGENT_ROOT under tmpdir() and tears it down afterwards.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
import { test } from "node:test";
|
|
10
|
+
import assert from "node:assert/strict";
|
|
11
|
+
import { promises as fsp } from "fs";
|
|
12
|
+
import { writeFileSync, mkdirSync } from "node:fs";
|
|
13
|
+
import { tmpdir } from "os";
|
|
14
|
+
import { join, dirname } from "path";
|
|
15
|
+
|
|
16
|
+
import { enqueueTick, listInbox, listClaimed, busDepth, getBusPaths } from "../../lib/cadence-bus.mjs";
|
|
17
|
+
import { startConsumer } from "./cadence-consumer.mjs";
|
|
18
|
+
|
|
19
|
+
/**
 * Create a unique throwaway AGENT_ROOT directory under the OS temp dir.
 * The name mixes pid, timestamp and a random base36 suffix so parallel
 * test runs never collide.
 *
 * @returns {Promise<string>} absolute path of the freshly created root
 */
async function makeAgentRoot() {
  const suffix = Math.random().toString(36).slice(2, 8);
  const name = `cadence-consumer-test-${process.pid}-${Date.now()}-${suffix}`;
  const root = join(tmpdir(), name);
  await fsp.mkdir(root, { recursive: true });
  return root;
}
|
|
27
|
+
|
|
28
|
+
/** Best-effort recursive removal of a test root; all failures are ignored. */
async function rmRoot(path) {
  await fsp.rm(path, { recursive: true, force: true }).catch(() => {});
}
|
|
31
|
+
|
|
32
|
+
/**
 * Seed an agent root with a trigger prompt for a given cadence, creating
 * the schedules/triggers/ directory on demand. Lets the consumer's
 * "escalate" path find a real prompt file when the stub spawner is invoked.
 *
 * @param {string} root - agent root directory
 * @param {string} cadence - cadence name; becomes the .md file stem
 * @param {string} [body] - prompt file contents (defaults to a heading)
 * @returns {string} absolute path of the written prompt file
 */
function plantPrompt(root, cadence, body = `# ${cadence} trigger prompt`) {
  const promptPath = join(root, "schedules/triggers", `${cadence}.md`);
  const parent = dirname(promptPath);
  mkdirSync(parent, { recursive: true });
  writeFileSync(promptPath, body);
  return promptPath;
}
|
|
43
|
+
|
|
44
|
+
/**
 * Poll `predicate()` until it returns truthy or the deadline passes.
 * Polls every `intervalMs` (default 25ms) — fast enough for sub-200ms
 * drain cycles, slow enough that tests don't burn CPU.
 *
 * @param {() => (boolean|Promise<boolean>)} predicate - condition to await
 * @param {{timeoutMs?: number, intervalMs?: number}} [opts]
 * @returns {Promise<boolean>} true when the predicate fired, false on timeout
 */
async function waitFor(predicate, { timeoutMs = 5_000, intervalMs = 25 } = {}) {
  const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
  const deadline = Date.now() + timeoutMs;
  for (;;) {
    if (Date.now() >= deadline) return false;
    if (await predicate()) return true;
    await sleep(intervalMs);
  }
}
|
|
57
|
+
|
|
58
|
+
// ---------------------------------------------------------------------------
// Inline handlers
// ---------------------------------------------------------------------------

// Registry-inline cadences must be serviced entirely by the persistent
// process: the injected spawnSession stub must never fire, and the bus
// must be fully drained (inbox and claimed both empty) afterwards.
test("consumer processes a registry-inline cadence without spawning", async () => {
  const root = await makeAgentRoot();
  const spawnCalls = [];
  const consumer = startConsumer({
    agentRoot: root,
    pollMs: 50,
    heartbeatMs: 1_000,
    recoveryMs: 60_000,
    spawnSession: async (opts) => { spawnCalls.push(opts); return { ok: true, exit_code: 0, duration_ms: 1 }; },
  });
  try {
    enqueueTick({ cadence: "cadence-bus-heartbeat", source: "manual", agentRoot: root });
    const ok = await waitFor(() => consumer.getStats().inline >= 1);
    assert.ok(ok, "heartbeat cadence should drain inline");
    assert.equal(spawnCalls.length, 0, "inline cadence must not spawn");
    assert.equal(busDepth(root).inbox, 0);
    assert.equal(busDepth(root).claimed, 0);
  } finally {
    await consumer.stop();
    await rmRoot(root);
  }
});

// ---------------------------------------------------------------------------
// Guarded handlers
// ---------------------------------------------------------------------------

// A guarded cadence with nothing to do should complete inline, saving the
// cost of a sub-session spawn.
test("guarded cadence completes inline when there's no work", async () => {
  const root = await makeAgentRoot();
  const spawnCalls = [];
  const consumer = startConsumer({
    agentRoot: root,
    pollMs: 50,
    spawnSession: async (opts) => { spawnCalls.push(opts); return { ok: true, exit_code: 0 }; },
  });
  try {
    // No inbox files, no queues with items → guard returns "inline".
    enqueueTick({ cadence: "inbox-processor", source: "launchd", agentRoot: root });
    const ok = await waitFor(() => consumer.getStats().inline >= 1);
    assert.ok(ok);
    assert.equal(spawnCalls.length, 0, "inbox empty → no spawn");
  } finally {
    await consumer.stop();
    await rmRoot(root);
  }
});
|
|
108
|
+
|
|
109
|
+
// A guarded cadence whose guard finds pending work must escalate: exactly
// one sub-session spawn, targeting the cadence's trigger prompt.
test("guarded cadence escalates when guard says work is pending", async () => {
  const root = await makeAgentRoot();
  // Seed an inbox item so the inbox-processor guard returns "escalate".
  mkdirSync(join(root, "state/inbox/slack"), { recursive: true });
  writeFileSync(join(root, "state/inbox/slack/item.json"), JSON.stringify({ id: "msg1" }));
  plantPrompt(root, "inbox-processor");
  const spawnCalls = [];
  const consumer = startConsumer({
    agentRoot: root,
    pollMs: 50,
    spawnSession: async (opts) => {
      spawnCalls.push(opts);
      return { ok: true, exit_code: 0, duration_ms: 5 };
    },
  });
  try {
    enqueueTick({ cadence: "inbox-processor", source: "launchd", agentRoot: root });
    const ok = await waitFor(() => consumer.getStats().escalated >= 1);
    assert.ok(ok, "guard with pending work should escalate");
    assert.equal(spawnCalls.length, 1);
    assert.equal(spawnCalls[0].cadence, "inbox-processor");
    assert.match(spawnCalls[0].promptPath, /schedules\/triggers\/inbox-processor\.md$/);
  } finally {
    await consumer.stop();
    await rmRoot(root);
  }
});

// ---------------------------------------------------------------------------
// Escalate path
// ---------------------------------------------------------------------------

// escalate-mode cadences always spawn, with the registry-configured prompt.
test("escalate-mode cadence spawns with the configured prompt", async () => {
  const root = await makeAgentRoot();
  plantPrompt(root, "weekly-strategic-memo");
  const spawnCalls = [];
  const consumer = startConsumer({
    agentRoot: root,
    pollMs: 50,
    spawnSession: async (opts) => {
      spawnCalls.push(opts);
      return { ok: true, exit_code: 0, duration_ms: 10 };
    },
  });
  try {
    enqueueTick({ cadence: "weekly-strategic-memo", source: "launchd", agentRoot: root });
    const ok = await waitFor(() => consumer.getStats().escalated >= 1);
    assert.ok(ok);
    assert.equal(spawnCalls[0].cadence, "weekly-strategic-memo");
  } finally {
    await consumer.stop();
    await rmRoot(root);
  }
});

// Cadences absent from the registry still escalate, provided a prompt file
// exists at schedules/triggers/<cadence>.md.
test("unknown cadence with a prompt file falls back to escalate", async () => {
  const root = await makeAgentRoot();
  plantPrompt(root, "custom-cadence");
  const spawnCalls = [];
  const consumer = startConsumer({
    agentRoot: root,
    pollMs: 50,
    spawnSession: async (opts) => {
      spawnCalls.push(opts);
      return { ok: true, exit_code: 0 };
    },
  });
  try {
    enqueueTick({ cadence: "custom-cadence", source: "manual", agentRoot: root });
    const ok = await waitFor(() => consumer.getStats().escalated >= 1);
    assert.ok(ok);
    assert.equal(spawnCalls.length, 1);
  } finally {
    await consumer.stop();
    await rmRoot(root);
  }
});

// With neither a registry entry nor a prompt file, the event is unroutable
// and must go straight to the dead-letter queue without spawning.
test("unknown cadence with no prompt file DLQ's immediately", async () => {
  const root = await makeAgentRoot();
  const spawnCalls = [];
  const consumer = startConsumer({
    agentRoot: root,
    pollMs: 50,
    spawnSession: async (opts) => { spawnCalls.push(opts); return { ok: true, exit_code: 0 }; },
  });
  try {
    enqueueTick({ cadence: "totally-unknown-thing", source: "manual", agentRoot: root });
    const ok = await waitFor(() => consumer.getStats().dlq >= 1);
    assert.ok(ok);
    assert.equal(spawnCalls.length, 0);
  } finally {
    await consumer.stop();
    await rmRoot(root);
  }
});
|
|
205
|
+
|
|
206
|
+
// ---------------------------------------------------------------------------
// Spawn failures
// ---------------------------------------------------------------------------

// A permanently failing spawner must exhaust the retry budget and land the
// event in the DLQ rather than spinning forever. The 15s timeout leaves
// headroom for the consumer's retry backoff — TODO confirm the actual
// retry budget against cadence-consumer.mjs.
test("spawn failure retries within the budget, then DLQs", async () => {
  const root = await makeAgentRoot();
  plantPrompt(root, "weekly-strategic-memo");
  const consumer = startConsumer({
    agentRoot: root,
    pollMs: 25,
    spawnSession: async () => ({ ok: false, exit_code: 1, error: "always-fail", duration_ms: 1 }),
  });
  try {
    enqueueTick({ cadence: "weekly-strategic-memo", source: "launchd", agentRoot: root });
    const ok = await waitFor(() => consumer.getStats().dlq >= 1, { timeoutMs: 15_000 });
    assert.ok(ok, `event should DLQ eventually; stats=${JSON.stringify(consumer.getStats())}`);
    assert.ok(consumer.getStats().spawn_failures >= 1);
  } finally {
    await consumer.stop();
    await rmRoot(root);
  }
});
|
|
228
|
+
|
|
229
|
+
// ---------------------------------------------------------------------------
// Emergency stop
// ---------------------------------------------------------------------------

// While <root>/.emergency-stop exists the consumer must not process events:
// no spawns, a skipped_emergency_stop counter increment, and the event left
// untouched in the inbox.
test("consumer skips processing while .emergency-stop exists", async () => {
  const root = await makeAgentRoot();
  writeFileSync(join(root, ".emergency-stop"), "now");
  plantPrompt(root, "weekly-strategic-memo");
  let spawnCount = 0;
  const consumer = startConsumer({
    agentRoot: root,
    pollMs: 50,
    spawnSession: async () => { spawnCount++; return { ok: true, exit_code: 0 }; },
  });
  try {
    // Enqueue directly to the inbox: the bus's emergency-stop check is on
    // enqueue, so we must bypass it. Drop a file straight in inbox/.
    const paths = getBusPaths(root);
    writeFileSync(join(paths.inbox, "evt-test.json"), JSON.stringify({
      id: "evt-test", type: "cadence_tick", source: "manual", ts: new Date().toISOString(),
      cadence: "weekly-strategic-memo", workflow: null, correlation_id: null,
      priority: "normal", metadata: {}, attempts: 0,
    }));
    // Give the consumer several poll cycles to (not) act on the event.
    await new Promise((r) => setTimeout(r, 300));
    const stats = consumer.getStats();
    assert.equal(spawnCount, 0, "no spawn during emergency stop");
    assert.ok(stats.skipped_emergency_stop >= 1);
    assert.equal(listInbox(root).length, 1, "event remains in inbox until stop lifts");
  } finally {
    await consumer.stop();
    await rmRoot(root);
  }
});

// Removing .emergency-stop must let the already-enqueued event be picked up
// without a restart.
test("consumer resumes after .emergency-stop is removed", async () => {
  const root = await makeAgentRoot();
  writeFileSync(join(root, ".emergency-stop"), "now");
  plantPrompt(root, "weekly-strategic-memo");
  let spawnCount = 0;
  const consumer = startConsumer({
    agentRoot: root,
    pollMs: 25,
    spawnSession: async () => { spawnCount++; return { ok: true, exit_code: 0 }; },
  });
  try {
    const paths = getBusPaths(root);
    writeFileSync(join(paths.inbox, "evt-test.json"), JSON.stringify({
      id: "evt-test", type: "cadence_tick", source: "manual", ts: new Date().toISOString(),
      cadence: "weekly-strategic-memo", workflow: null, correlation_id: null,
      priority: "normal", metadata: {}, attempts: 0,
    }));
    await new Promise((r) => setTimeout(r, 200));
    assert.equal(spawnCount, 0);
    // Lift the stop.
    await fsp.unlink(join(root, ".emergency-stop"));
    const ok = await waitFor(() => spawnCount >= 1);
    assert.ok(ok, "should process after stop lifts");
    assert.ok(consumer.getStats().escalated >= 1);
  } finally {
    await consumer.stop();
    await rmRoot(root);
  }
});
|
|
292
|
+
|
|
293
|
+
// ---------------------------------------------------------------------------
// Stale claim recovery on tick
// ---------------------------------------------------------------------------

// An hour-old file in claimed/ (mtime backdated via utimes) must be swept
// back to the inbox by the recovery pass and then processed normally.
test("consumer recovers stale claims on its periodic sweep", async () => {
  const root = await makeAgentRoot();
  // Plant a stale claim directly.
  const paths = getBusPaths(root);
  mkdirSync(paths.claimed, { recursive: true });
  const stalePath = join(paths.claimed, "evt-stale.json");
  writeFileSync(stalePath, JSON.stringify({
    id: "evt-stale", cadence: "cadence-bus-heartbeat",
    source: "manual", ts: "2026-01-01T00:00:00.000Z",
    workflow: null, correlation_id: null, priority: "normal",
    metadata: {}, attempts: 1, type: "cadence_tick",
  }));
  const oldMs = Date.now() - 60 * 60 * 1000; // 1h old
  await fsp.utimes(stalePath, new Date(oldMs), new Date(oldMs));

  const consumer = startConsumer({
    agentRoot: root, pollMs: 50,
    spawnSession: async () => ({ ok: true, exit_code: 0 }),
  });
  try {
    // tickOnce manually so we don't depend on the periodic recovery timer.
    await consumer.tickOnce();
    // Heartbeat is an inline cadence, so recovery success shows up as an
    // inline stat increment.
    const ok = await waitFor(() => consumer.getStats().inline >= 1, { timeoutMs: 5_000 });
    assert.ok(ok, "stale claim should be recovered and processed");
  } finally {
    await consumer.stop();
    await rmRoot(root);
  }
});
|
|
326
|
+
|
|
327
|
+
// ---------------------------------------------------------------------------
// Health
// ---------------------------------------------------------------------------

// The consumer must periodically write a health snapshot with at least a
// string timestamp and a version field.
test("consumer writes a heartbeat to state/cadence-bus/health.json", async () => {
  const root = await makeAgentRoot();
  const consumer = startConsumer({
    agentRoot: root,
    pollMs: 100,
    heartbeatMs: 50, // force frequent heartbeats so the test doesn't drag
    spawnSession: async () => ({ ok: true, exit_code: 0 }),
  });
  try {
    const ok = await waitFor(async () => {
      try {
        const raw = await fsp.readFile(join(root, "state/cadence-bus/health.json"), "utf-8");
        const h = JSON.parse(raw);
        return typeof h.ts === "string" && h.version;
      } catch { return false; } // not written yet, or mid-write
    });
    assert.ok(ok, "health.json must be written");
  } finally {
    await consumer.stop();
    await rmRoot(root);
  }
});
|
|
353
|
+
|
|
354
|
+
// ---------------------------------------------------------------------------
// Sub-session policy boundary
// ---------------------------------------------------------------------------

test("explicit reason is attached to event metadata before escalation", async () => {
  // The consumer must surface WHY a sub-session was spawned. For guarded
  // cadences this comes from the guard's reason/result. For unknown
  // cadences with prompts, the registry policy log line is sufficient,
  // but for audit visibility we attach guard data to event.metadata.
  const root = await makeAgentRoot();
  // Force a guard:escalate outcome via inbox-processor with pending items.
  mkdirSync(join(root, "state/inbox/internal"), { recursive: true });
  writeFileSync(join(root, "state/inbox/internal/x.json"), "{}");
  plantPrompt(root, "inbox-processor");
  const seen = [];
  const consumer = startConsumer({
    agentRoot: root, pollMs: 50,
    spawnSession: async (opts) => { seen.push(opts); return { ok: true, exit_code: 0 }; },
  });
  try {
    enqueueTick({ cadence: "inbox-processor", source: "launchd", agentRoot: root });
    const ok = await waitFor(() => seen.length > 0);
    assert.ok(ok);
    // The event archive should record guard_result so future audits can
    // trace why the sub-session ran.
    const procDir = join(root, "state/cadence-bus/processed");
    // Drill down by date — archives are bucketed under YYYY-MM-DD dirs.
    const dates = (await fsp.readdir(procDir)).filter((n) => /^\d{4}-\d{2}-\d{2}$/.test(n));
    assert.ok(dates.length >= 1);
    const files = await fsp.readdir(join(procDir, dates[0]));
    const archive = JSON.parse(await fsp.readFile(join(procDir, dates[0], files[0]), "utf-8"));
    assert.equal(archive.result.decision, "escalated");
    // The consumer attaches the guard's outcome to the event metadata so
    // audits can trace exactly why a sub-session ran. We expect either a
    // recorded reason string or the full guard object.
    assert.ok(
      archive.metadata && (archive.metadata.reason || archive.metadata.guard),
      `expected metadata.reason or metadata.guard; got ${JSON.stringify(archive.metadata)}`,
    );
  } finally {
    await consumer.stop();
    await rmRoot(root);
  }
});
|
|
@@ -0,0 +1,263 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Maestro — Cadence Handler Registry
|
|
3
|
+
*
|
|
4
|
+
* Per-cadence policy for how the persistent main session services an event
|
|
5
|
+
* delivered onto the cadence bus.
|
|
6
|
+
*
|
|
7
|
+
* Three handler modes:
|
|
8
|
+
*
|
|
9
|
+
* "inline" — Handler runs entirely inside the persistent main Node
|
|
10
|
+
* process. No Claude Code spawned. Use for housekeeping
|
|
11
|
+
* (dashboard refresh, log rotation, queue sweeps that need
|
|
12
|
+
* no reasoning).
|
|
13
|
+
*
|
|
14
|
+
* "guarded" — Persistent process runs a lightweight pre-check (e.g.
|
|
15
|
+
* "is there anything in the inbox?") and only escalates to
|
|
16
|
+
* a sub-session if substantive work is required. If the
|
|
17
|
+
* pre-check says "no work", the tick completes inline.
|
|
18
|
+
*
|
|
19
|
+
* "escalate" — The cadence always needs Claude-grade reasoning or
|
|
20
|
+
* substantive drafting; the consumer spawns a sub-session
|
|
21
|
+
* with the trigger prompt under schedules/triggers/<name>.md.
|
|
22
|
+
* Reserved for substantive drafting, multi-step outreach,
|
|
23
|
+
* large audits, research, and explicit isolated audit
|
|
24
|
+
* boundaries.
|
|
25
|
+
*
|
|
26
|
+
* Handlers / guards receive ({ event, agentRoot, log }) and return a result
|
|
27
|
+
* object that becomes part of the processed event's archive record. They
|
|
28
|
+
* must not throw — return { ok:false, error } so the consumer can decide to
|
|
29
|
+
* retry or DLQ via its policy.
|
|
30
|
+
*
|
|
31
|
+
* Adding a new cadence: add an entry below. If you skip an entry the
|
|
32
|
+
* consumer falls back to escalate behaviour as long as a trigger prompt
|
|
33
|
+
* exists at schedules/triggers/<cadence>.md; otherwise it DLQ's the event
|
|
34
|
+
* with a clear error.
|
|
35
|
+
*/
|
|
36
|
+
|
|
37
|
+
import { existsSync, readdirSync, readFileSync } from "node:fs";
|
|
38
|
+
import { join } from "node:path";
|
|
39
|
+
import { recoverStaleClaims } from "../../lib/cadence-bus.mjs";
|
|
40
|
+
|
|
41
|
+
// ---------------------------------------------------------------------------
|
|
42
|
+
// Pre-check helpers (shared across guarded handlers)
|
|
43
|
+
// ---------------------------------------------------------------------------
|
|
44
|
+
|
|
45
|
+
/**
 * Count entries in `dir`, optionally restricted to names matching `re`.
 * A missing or unreadable directory counts as zero (idiomatic for fresh
 * agent repos).
 *
 * @param {string} dir - directory to scan
 * @param {RegExp} [re] - optional filename filter
 * @returns {number} matching entry count, or 0 on any failure
 */
function countFiles(dir, re) {
  if (!existsSync(dir)) return 0;
  try {
    const names = readdirSync(dir);
    if (!re) return names.length;
    let matched = 0;
    for (const name of names) {
      if (re.test(name)) matched += 1;
    }
    return matched;
  } catch {
    return 0;
  }
}
|
|
58
|
+
|
|
59
|
+
/**
 * List queue file names under state/queues/ that contain at least one
 * "- " YAML list entry. A cheap line-scan avoids a YAML parser dependency
 * — plenty good enough for a guard pre-check.
 *
 * @param {string} agentRoot - agent root directory
 * @returns {string[]} .yaml file names with at least one list item
 */
function queuesWithWork(agentRoot) {
  const queueDir = join(agentRoot, "state", "queues");
  if (!existsSync(queueDir)) return [];
  const nonEmpty = [];
  for (const fileName of readdirSync(queueDir)) {
    if (!fileName.endsWith(".yaml")) continue;
    let body;
    try {
      body = readFileSync(join(queueDir, fileName), "utf-8");
    } catch {
      continue; // unreadable queue file: treat as empty
    }
    if (/^- /m.test(body)) nonEmpty.push(fileName);
  }
  return nonEmpty;
}
|
|
76
|
+
|
|
77
|
+
// ---------------------------------------------------------------------------
|
|
78
|
+
// Inline handlers
|
|
79
|
+
// ---------------------------------------------------------------------------
|
|
80
|
+
|
|
81
|
+
/**
 * Heartbeat tick — purely internal, no side effects. The consumer writes
 * health.json on every cycle anyway; this handler exists so external
 * schedulers (init, upgrade, doctor) can drop a heartbeat into the bus and
 * verify end-to-end delivery.
 *
 * @param {{event: object}} ctx - the bus event being serviced
 * @returns {Promise<object>} inline acknowledgement record for the archive
 */
async function handleHeartbeat({ event }) {
  const { cadence } = event;
  return { ok: true, decision: "inline", cadence, note: "heartbeat acknowledged" };
}
|
|
95
|
+
|
|
96
|
+
/**
 * On-demand stale-claim recovery. The consumer already sweeps on startup
 * and periodically; this explicit cadence lets ops force a sweep without
 * restarting the daemon.
 *
 * @param {{event: object, agentRoot: string}} ctx
 * @returns {Promise<object>} inline result carrying the sweep stats
 */
async function handleStaleRecovery({ event, agentRoot }) {
  const sweep = recoverStaleClaims(agentRoot);
  return { ok: true, decision: "inline", cadence: event.cadence, stats: sweep };
}
|
|
105
|
+
|
|
106
|
+
// ---------------------------------------------------------------------------
|
|
107
|
+
// Guarded handlers — cheap pre-check, escalate only when there's work
|
|
108
|
+
// ---------------------------------------------------------------------------
|
|
109
|
+
|
|
110
|
+
/**
 * inbox-processor guard:
 * - Count items under state/inbox/{slack,gmail,calendar,internal,sms,whatsapp}
 *   (json/yaml files only; items moved to state/inbox/processed/ are not
 *   in these directories and so are not counted).
 * - 0 pending → complete inline (saves a Claude Code spawn).
 * - >0 pending → escalate so a sub-session can classify and route them.
 *
 * @param {{agentRoot: string}} ctx
 * @returns {Promise<object>} guard decision with pending count
 */
async function guardInboxProcessor({ agentRoot }) {
  const inboxRoot = join(agentRoot, "state", "inbox");
  const sources = ["slack", "gmail", "calendar", "internal", "sms", "whatsapp"];
  const pending = sources.reduce(
    (total, src) => total + countFiles(join(inboxRoot, src), /\.(json|ya?ml)$/i),
    0,
  );
  return pending === 0
    ? { ok: true, decision: "inline", reason: "inbox empty", pending: 0 }
    : { ok: true, decision: "escalate", pending };
}
|
|
129
|
+
|
|
130
|
+
/**
 * backlog-executor guard:
 * - Scan state/queues/*.yaml; any queue containing at least one item
 *   counts as work.
 * - No work → complete inline; otherwise escalate.
 *
 * @param {{agentRoot: string}} ctx
 * @returns {Promise<object>} guard decision, listing busy queues on escalate
 */
async function guardBacklogExecutor({ agentRoot }) {
  const busy = queuesWithWork(agentRoot);
  if (busy.length > 0) {
    return { ok: true, decision: "escalate", queues_with_work: busy };
  }
  return { ok: true, decision: "inline", reason: "all queues empty" };
}
|
|
143
|
+
|
|
144
|
+
/**
 * meeting-prep guard: escalate only when state/inbox/calendar/ holds
 * unprocessed event files (json/yaml). Checking that single directory is
 * the cheap signal for "is there a meeting to prepare for".
 *
 * @param {{agentRoot: string}} ctx
 * @returns {Promise<object>} guard decision with pending count on escalate
 */
async function guardMeetingPrep({ agentRoot }) {
  const calendarDir = join(agentRoot, "state", "inbox", "calendar");
  const pending = countFiles(calendarDir, /\.(json|ya?ml)$/i);
  return pending > 0
    ? { ok: true, decision: "escalate", pending }
    : { ok: true, decision: "inline", reason: "no pending calendar events" };
}
|
|
157
|
+
|
|
158
|
+
// ---------------------------------------------------------------------------
|
|
159
|
+
// Registry
|
|
160
|
+
// ---------------------------------------------------------------------------
|
|
161
|
+
|
|
162
|
+
/**
 * Cadence name → servicing policy.
 *
 * Each entry carries a `mode` ("inline" | "guarded" | "escalate"), plus a
 * `handler` (inline), `guard` (guarded), and/or `prompt` path relative to
 * the agent root (guarded and escalate modes). Prompt paths point under
 * schedules/triggers/. Unregistered cadences are resolved by the consumer:
 * escalate if the prompt file exists, DLQ otherwise.
 */
export const CADENCE_REGISTRY = {
  // Internal housekeeping (always inline)
  "cadence-bus-heartbeat": {
    mode: "inline",
    handler: handleHeartbeat,
    description: "Probe tick used by doctor / upgrade to verify end-to-end delivery.",
  },
  "cadence-bus-recovery": {
    mode: "inline",
    handler: handleStaleRecovery,
    description: "On-demand stale-claim recovery.",
  },

  // Operational cadences with cheap pre-checks
  "inbox-processor": {
    mode: "guarded",
    guard: guardInboxProcessor,
    prompt: "schedules/triggers/inbox-processor.md",
    description: "Classify / route inbound items if any are waiting.",
  },
  "backlog-executor": {
    mode: "guarded",
    guard: guardBacklogExecutor,
    prompt: "schedules/triggers/backlog-executor.md",
    description: "Execute the next slice of the proactive backlog if queues are non-empty.",
  },
  "meeting-prep": {
    mode: "guarded",
    guard: guardMeetingPrep,
    prompt: "schedules/triggers/meeting-prep.md",
    description: "Prepare for upcoming meetings when there are unprocessed calendar items.",
  },

  // Always-escalate (substantive reasoning / drafting required)
  "meeting-action-capture": {
    mode: "escalate",
    prompt: "schedules/triggers/meeting-action-capture.md",
    description: "Capture meeting action items into queues.",
  },
  "daily-morning-brief": {
    mode: "escalate",
    prompt: "schedules/triggers/daily-morning-brief.md",
    description: "Daily morning brief.",
  },
  "daily-midday-sweep": {
    mode: "escalate",
    prompt: "schedules/triggers/daily-midday-sweep.md",
    description: "Midday comms triage and queue sweep.",
  },
  "daily-evening-wrap": {
    mode: "escalate",
    prompt: "schedules/triggers/daily-evening-wrap.md",
    description: "Evening wrap, log review, follow-up close.",
  },
  "weekly-hiring": {
    mode: "escalate",
    prompt: "schedules/triggers/weekly-hiring.md",
    description: "Weekly hiring review.",
  },
  "weekly-priorities": {
    mode: "escalate",
    prompt: "schedules/triggers/weekly-priorities.md",
    description: "Weekly priorities review.",
  },
  "weekly-engineering-health": {
    mode: "escalate",
    prompt: "schedules/triggers/weekly-engineering-health.md",
    description: "Weekly engineering health.",
  },
  "weekly-execution": {
    mode: "escalate",
    prompt: "schedules/triggers/weekly-execution.md",
    description: "Weekly execution review.",
  },
  "weekly-strategic-memo": {
    mode: "escalate",
    prompt: "schedules/triggers/weekly-strategic-memo.md",
    description: "Weekly strategic memo drafting.",
  },
  "quarterly-self-assessment": {
    mode: "escalate",
    prompt: "schedules/triggers/quarterly-self-assessment.md",
    description: "Quarterly self-assessment.",
  },
};
|
|
247
|
+
|
|
248
|
+
/**
 * Look up a cadence definition. Returns null for unknown cadences so the
 * consumer can decide between inferred-escalate and DLQ based on prompt
 * file availability.
 *
 * Uses an own-property check so cadence names that collide with
 * Object.prototype members ("toString", "constructor", …) resolve to null
 * instead of leaking prototype values — keeps this consistent with
 * listCadenceNames(), which only ever reports own keys.
 *
 * @param {string} name - cadence name from the bus event
 * @returns {object|null} registry entry, or null when unregistered
 */
export function getCadenceDef(name) {
  return Object.hasOwn(CADENCE_REGISTRY, name) ? CADENCE_REGISTRY[name] : null;
}
|
|
256
|
+
|
|
257
|
+
/**
 * List every registered cadence name. Used by doctor to confirm the
 * consumer can route each cadence that the plist generator emits.
 *
 * @returns {string[]} registry keys in declaration order
 */
export function listCadenceNames() {
  return Object.entries(CADENCE_REGISTRY).map(([name]) => name);
}
|
|
@@ -34,5 +34,25 @@ try {
|
|
|
34
34
|
}
|
|
35
35
|
} catch { /* maestro singleton not available */ }
|
|
36
36
|
|
|
37
|
+
// Start the cadence consumer alongside the reactive event loop. This is the
// single persistent owner of cadence housekeeping — launchd plists enqueue
// cadence ticks onto state/cadence-bus/ and the consumer drains them here,
// either handling them inline or escalating to a sub-session when warranted.
// Failure to start the consumer must NOT take the reactive daemon down, so
// we isolate startup errors.
// NOTE(review): AGENT_DIR is defined earlier in this file (outside this
// fragment) — presumably the agent root; verify against the surrounding code.
try {
  const { startConsumer } = await import("./cadence-consumer.mjs");
  const consumer = startConsumer({ agentRoot: AGENT_DIR });
  // Stop the consumer cleanly on termination signals. The handlers below
  // deliberately do not call process.exit(); NOTE(review): shutdown() is a
  // floating promise here and may fire twice if both signals arrive —
  // confirm consumer.stop() is idempotent.
  const shutdown = async (sig) => {
    try { await consumer.stop(); } catch { /* ignore */ }
    console.log(`[DAEMON] cadence consumer stopped (${sig})`);
  };
  process.on("SIGTERM", () => shutdown("SIGTERM"));
  process.on("SIGINT", () => shutdown("SIGINT"));
} catch (err) {
  console.error(`[DAEMON] cadence consumer failed to start: ${err.message}`);
  // Reactive daemon continues. Doctor / healthcheck will surface this.
}
|
|
56
|
+
|
|
37
57
|
// Import and run the daemon (handles its own .env loading)
|
|
38
58
|
await import("./sophie-daemon.mjs");
|