discoclaw 0.5.2 → 0.5.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cron/chain.js +5 -0
- package/dist/cron/chain.test.js +226 -0
- package/dist/cron/cron-prompt.js +26 -4
- package/dist/cron/cron-prompt.test.js +78 -0
- package/dist/cron/executor.js +74 -1
- package/dist/cron/executor.test.js +102 -0
- package/dist/cron/run-stats.js +12 -1
- package/dist/cron/run-stats.test.js +68 -6
- package/dist/discord/actions-crons.js +112 -2
- package/dist/discord/actions-crons.test.js +48 -0
- package/dist/discord/update-command.js +2 -1
- package/dist/index.js +1 -0
- package/dist/npm-managed.js +1 -0
- package/dist/npm-managed.test.js +1 -0
- package/package.json +1 -1
|
@@ -0,0 +1,226 @@
|
|
|
1
|
+
import { describe, expect, it, vi, beforeEach, afterEach } from 'vitest';
|
|
2
|
+
import fs from 'node:fs/promises';
|
|
3
|
+
import path from 'node:path';
|
|
4
|
+
import os from 'node:os';
|
|
5
|
+
import { fireChainedJobs } from './executor.js';
|
|
6
|
+
import { loadRunStats } from './run-stats.js';
|
|
7
|
+
// ---------------------------------------------------------------------------
|
|
8
|
+
// Helpers
|
|
9
|
+
// ---------------------------------------------------------------------------
|
|
10
|
+
function mockLog() {
|
|
11
|
+
return { info: vi.fn(), warn: vi.fn(), error: vi.fn() };
|
|
12
|
+
}
|
|
13
|
+
let tmpDir;
|
|
14
|
+
beforeEach(async () => {
|
|
15
|
+
tmpDir = await fs.mkdtemp(path.join(os.tmpdir(), 'chain-test-'));
|
|
16
|
+
});
|
|
17
|
+
afterEach(async () => {
|
|
18
|
+
await fs.rm(tmpDir, { recursive: true, force: true, maxRetries: 3 });
|
|
19
|
+
});
|
|
20
|
+
async function makeStatsStore() {
|
|
21
|
+
return loadRunStats(path.join(tmpDir, 'stats.json'));
|
|
22
|
+
}
|
|
23
|
+
function makeDownstreamJob(cronId, threadId) {
|
|
24
|
+
return {
|
|
25
|
+
id: threadId,
|
|
26
|
+
cronId,
|
|
27
|
+
threadId,
|
|
28
|
+
guildId: 'guild-1',
|
|
29
|
+
name: `Job ${cronId}`,
|
|
30
|
+
def: { triggerType: 'schedule', schedule: '0 0 * * *', timezone: 'UTC', channel: 'general', prompt: 'test' },
|
|
31
|
+
cron: null,
|
|
32
|
+
running: false,
|
|
33
|
+
};
|
|
34
|
+
}
|
|
35
|
+
function makeMinimalCtx(statsStore, overrides) {
|
|
36
|
+
return {
|
|
37
|
+
client: {},
|
|
38
|
+
runtime: { id: 'claude_code', capabilities: new Set(), async *invoke() { } },
|
|
39
|
+
model: 'haiku',
|
|
40
|
+
cwd: '/tmp',
|
|
41
|
+
tools: [],
|
|
42
|
+
timeoutMs: 30_000,
|
|
43
|
+
status: null,
|
|
44
|
+
log: mockLog(),
|
|
45
|
+
discordActionsEnabled: false,
|
|
46
|
+
actionFlags: {
|
|
47
|
+
channels: false, messaging: false, guild: false, moderation: false,
|
|
48
|
+
polls: false, tasks: false, crons: false, botProfile: false,
|
|
49
|
+
forge: false, plan: false, memory: false, config: false, defer: false, voice: false,
|
|
50
|
+
},
|
|
51
|
+
statsStore,
|
|
52
|
+
...overrides,
|
|
53
|
+
};
|
|
54
|
+
}
|
|
55
|
+
// ---------------------------------------------------------------------------
|
|
56
|
+
// fireChainedJobs
|
|
57
|
+
// ---------------------------------------------------------------------------
|
|
58
|
+
describe('fireChainedJobs', () => {
|
|
59
|
+
it('does nothing when getSchedulerJob is not set', async () => {
|
|
60
|
+
const store = await makeStatsStore();
|
|
61
|
+
await store.upsertRecord('upstream', 'thread-up', { chain: ['downstream'] });
|
|
62
|
+
await store.upsertRecord('downstream', 'thread-down', {});
|
|
63
|
+
const ctx = makeMinimalCtx(store);
|
|
64
|
+
// No getSchedulerJob → early return
|
|
65
|
+
await fireChainedJobs('upstream', ctx);
|
|
66
|
+
const rec = store.getRecord('downstream');
|
|
67
|
+
// State should NOT have been forwarded
|
|
68
|
+
expect(rec?.state).toBeUndefined();
|
|
69
|
+
});
|
|
70
|
+
it('does nothing when statsStore is not set', async () => {
|
|
71
|
+
const ctx = makeMinimalCtx(undefined, { statsStore: undefined });
|
|
72
|
+
// Should not throw
|
|
73
|
+
await fireChainedJobs('upstream', ctx);
|
|
74
|
+
});
|
|
75
|
+
it('does nothing when the upstream job has no chain', async () => {
|
|
76
|
+
const store = await makeStatsStore();
|
|
77
|
+
await store.upsertRecord('upstream', 'thread-up', {});
|
|
78
|
+
const getSchedulerJob = vi.fn();
|
|
79
|
+
const ctx = makeMinimalCtx(store, { getSchedulerJob });
|
|
80
|
+
await fireChainedJobs('upstream', ctx);
|
|
81
|
+
expect(getSchedulerJob).not.toHaveBeenCalled();
|
|
82
|
+
});
|
|
83
|
+
it('does nothing when chain is empty array', async () => {
|
|
84
|
+
const store = await makeStatsStore();
|
|
85
|
+
await store.upsertRecord('upstream', 'thread-up', { chain: [] });
|
|
86
|
+
const getSchedulerJob = vi.fn();
|
|
87
|
+
const ctx = makeMinimalCtx(store, { getSchedulerJob });
|
|
88
|
+
await fireChainedJobs('upstream', ctx);
|
|
89
|
+
expect(getSchedulerJob).not.toHaveBeenCalled();
|
|
90
|
+
});
|
|
91
|
+
it('forwards __upstream state to downstream job', async () => {
|
|
92
|
+
const store = await makeStatsStore();
|
|
93
|
+
const upstreamState = { lastSeenTag: 'v2.3.1', items: [1, 2, 3] };
|
|
94
|
+
await store.upsertRecord('upstream', 'thread-up', {
|
|
95
|
+
chain: ['downstream'],
|
|
96
|
+
state: upstreamState,
|
|
97
|
+
});
|
|
98
|
+
await store.upsertRecord('downstream', 'thread-down', {
|
|
99
|
+
state: { existingKey: 'preserved' },
|
|
100
|
+
});
|
|
101
|
+
const downstreamJob = makeDownstreamJob('downstream', 'thread-down');
|
|
102
|
+
const getSchedulerJob = vi.fn().mockReturnValue(downstreamJob);
|
|
103
|
+
const ctx = makeMinimalCtx(store, { getSchedulerJob });
|
|
104
|
+
await fireChainedJobs('upstream', ctx);
|
|
105
|
+
const rec = store.getRecord('downstream');
|
|
106
|
+
expect(rec?.state).toEqual({
|
|
107
|
+
existingKey: 'preserved',
|
|
108
|
+
__upstream: { fromCronId: 'upstream', state: upstreamState },
|
|
109
|
+
});
|
|
110
|
+
});
|
|
111
|
+
it('forwards empty state as __upstream when upstream has no state', async () => {
|
|
112
|
+
const store = await makeStatsStore();
|
|
113
|
+
await store.upsertRecord('upstream', 'thread-up', { chain: ['downstream'] });
|
|
114
|
+
await store.upsertRecord('downstream', 'thread-down', {});
|
|
115
|
+
const downstreamJob = makeDownstreamJob('downstream', 'thread-down');
|
|
116
|
+
const getSchedulerJob = vi.fn().mockReturnValue(downstreamJob);
|
|
117
|
+
const ctx = makeMinimalCtx(store, { getSchedulerJob });
|
|
118
|
+
await fireChainedJobs('upstream', ctx);
|
|
119
|
+
const rec = store.getRecord('downstream');
|
|
120
|
+
expect(rec?.state).toEqual({
|
|
121
|
+
__upstream: { fromCronId: 'upstream', state: {} },
|
|
122
|
+
});
|
|
123
|
+
});
|
|
124
|
+
it('skips downstream when record is not found', async () => {
|
|
125
|
+
const store = await makeStatsStore();
|
|
126
|
+
await store.upsertRecord('upstream', 'thread-up', { chain: ['nonexistent'] });
|
|
127
|
+
const getSchedulerJob = vi.fn();
|
|
128
|
+
const log = mockLog();
|
|
129
|
+
const ctx = makeMinimalCtx(store, { getSchedulerJob, log });
|
|
130
|
+
await fireChainedJobs('upstream', ctx);
|
|
131
|
+
expect(getSchedulerJob).not.toHaveBeenCalled();
|
|
132
|
+
expect(log.warn).toHaveBeenCalledWith(expect.objectContaining({ downstream: 'nonexistent' }), expect.stringContaining('record not found'));
|
|
133
|
+
});
|
|
134
|
+
it('skips downstream when scheduler job is not found', async () => {
|
|
135
|
+
const store = await makeStatsStore();
|
|
136
|
+
await store.upsertRecord('upstream', 'thread-up', { chain: ['downstream'] });
|
|
137
|
+
await store.upsertRecord('downstream', 'thread-down', {});
|
|
138
|
+
const getSchedulerJob = vi.fn().mockReturnValue(undefined);
|
|
139
|
+
const log = mockLog();
|
|
140
|
+
const ctx = makeMinimalCtx(store, { getSchedulerJob, log });
|
|
141
|
+
await fireChainedJobs('upstream', ctx);
|
|
142
|
+
expect(log.warn).toHaveBeenCalledWith(expect.objectContaining({ downstream: 'downstream' }), expect.stringContaining('scheduler job not found'));
|
|
143
|
+
});
|
|
144
|
+
it('fires multiple downstream jobs independently', async () => {
|
|
145
|
+
const store = await makeStatsStore();
|
|
146
|
+
await store.upsertRecord('upstream', 'thread-up', {
|
|
147
|
+
chain: ['down-a', 'down-b'],
|
|
148
|
+
state: { data: 42 },
|
|
149
|
+
});
|
|
150
|
+
await store.upsertRecord('down-a', 'thread-a', {});
|
|
151
|
+
await store.upsertRecord('down-b', 'thread-b', {});
|
|
152
|
+
const jobA = makeDownstreamJob('down-a', 'thread-a');
|
|
153
|
+
const jobB = makeDownstreamJob('down-b', 'thread-b');
|
|
154
|
+
const getSchedulerJob = vi.fn((threadId) => {
|
|
155
|
+
if (threadId === 'thread-a')
|
|
156
|
+
return jobA;
|
|
157
|
+
if (threadId === 'thread-b')
|
|
158
|
+
return jobB;
|
|
159
|
+
return undefined;
|
|
160
|
+
});
|
|
161
|
+
const log = mockLog();
|
|
162
|
+
const ctx = makeMinimalCtx(store, { getSchedulerJob, log });
|
|
163
|
+
await fireChainedJobs('upstream', ctx);
|
|
164
|
+
// Both should have __upstream state forwarded
|
|
165
|
+
const recA = store.getRecord('down-a');
|
|
166
|
+
const recB = store.getRecord('down-b');
|
|
167
|
+
expect(recA?.state?.__upstream).toEqual({ fromCronId: 'upstream', state: { data: 42 } });
|
|
168
|
+
expect(recB?.state?.__upstream).toEqual({ fromCronId: 'upstream', state: { data: 42 } });
|
|
169
|
+
// Both should have been logged as fired
|
|
170
|
+
expect(log.info).toHaveBeenCalledWith(expect.objectContaining({ downstream: 'down-a' }), expect.stringContaining('downstream fired'));
|
|
171
|
+
expect(log.info).toHaveBeenCalledWith(expect.objectContaining({ downstream: 'down-b' }), expect.stringContaining('downstream fired'));
|
|
172
|
+
});
|
|
173
|
+
it('logs warning and skips downstream when chain depth >= 10', async () => {
|
|
174
|
+
const store = await makeStatsStore();
|
|
175
|
+
await store.upsertRecord('upstream', 'thread-up', { chain: ['downstream'] });
|
|
176
|
+
await store.upsertRecord('downstream', 'thread-down', {});
|
|
177
|
+
const getSchedulerJob = vi.fn();
|
|
178
|
+
const log = mockLog();
|
|
179
|
+
const ctx = makeMinimalCtx(store, { getSchedulerJob, log, chainDepth: 10 });
|
|
180
|
+
await fireChainedJobs('upstream', ctx);
|
|
181
|
+
expect(log.warn).toHaveBeenCalledWith(expect.objectContaining({ cronId: 'upstream', chainDepth: 10 }), expect.stringContaining('depth limit'));
|
|
182
|
+
// getSchedulerJob should never be called — we returned early
|
|
183
|
+
expect(getSchedulerJob).not.toHaveBeenCalled();
|
|
184
|
+
});
|
|
185
|
+
it('increments chain depth for downstream execution context', async () => {
|
|
186
|
+
const store = await makeStatsStore();
|
|
187
|
+
await store.upsertRecord('upstream', 'thread-up', {
|
|
188
|
+
chain: ['downstream'],
|
|
189
|
+
state: { x: 1 },
|
|
190
|
+
});
|
|
191
|
+
await store.upsertRecord('downstream', 'thread-down', {});
|
|
192
|
+
const downstreamJob = makeDownstreamJob('downstream', 'thread-down');
|
|
193
|
+
const getSchedulerJob = vi.fn().mockReturnValue(downstreamJob);
|
|
194
|
+
const log = mockLog();
|
|
195
|
+
const ctx = makeMinimalCtx(store, { getSchedulerJob, log, chainDepth: 5 });
|
|
196
|
+
await fireChainedJobs('upstream', ctx);
|
|
197
|
+
// The function should have fired (depth 5 < 10)
|
|
198
|
+
expect(log.info).toHaveBeenCalledWith(expect.objectContaining({ downstream: 'downstream' }), expect.stringContaining('downstream fired'));
|
|
199
|
+
});
|
|
200
|
+
it('handles state forward failure gracefully', async () => {
|
|
201
|
+
const store = await makeStatsStore();
|
|
202
|
+
await store.upsertRecord('upstream', 'thread-up', {
|
|
203
|
+
chain: ['downstream'],
|
|
204
|
+
state: { data: 1 },
|
|
205
|
+
});
|
|
206
|
+
await store.upsertRecord('downstream', 'thread-down', {});
|
|
207
|
+
// Mock upsertRecord to fail on state forwarding
|
|
208
|
+
const originalUpsert = store.upsertRecord.bind(store);
|
|
209
|
+
let callCount = 0;
|
|
210
|
+
vi.spyOn(store, 'upsertRecord').mockImplementation(async (...args) => {
|
|
211
|
+
callCount++;
|
|
212
|
+
// Fail on the state-forwarding call (the one during fireChainedJobs)
|
|
213
|
+
if (callCount > 0)
|
|
214
|
+
throw new Error('disk full');
|
|
215
|
+
return originalUpsert(...args);
|
|
216
|
+
});
|
|
217
|
+
const downstreamJob = makeDownstreamJob('downstream', 'thread-down');
|
|
218
|
+
const getSchedulerJob = vi.fn().mockReturnValue(downstreamJob);
|
|
219
|
+
const log = mockLog();
|
|
220
|
+
const ctx = makeMinimalCtx(store, { getSchedulerJob, log });
|
|
221
|
+
await fireChainedJobs('upstream', ctx);
|
|
222
|
+
// Should log a warning but still fire downstream
|
|
223
|
+
expect(log.warn).toHaveBeenCalledWith(expect.objectContaining({ downstream: 'downstream' }), expect.stringContaining('state forward failed'));
|
|
224
|
+
expect(log.info).toHaveBeenCalledWith(expect.objectContaining({ downstream: 'downstream' }), expect.stringContaining('downstream fired'));
|
|
225
|
+
});
|
|
226
|
+
});
|
package/dist/cron/cron-prompt.js
CHANGED
|
@@ -12,8 +12,12 @@
|
|
|
12
12
|
* Expand {{channel}} and {{channelId}} placeholders in a cron prompt template.
|
|
13
13
|
* All occurrences are replaced; unrecognized placeholders are left intact.
|
|
14
14
|
*/
|
|
15
|
-
export function expandCronPlaceholders(text, channel, channelId) {
|
|
16
|
-
|
|
15
|
+
export function expandCronPlaceholders(text, channel, channelId, state) {
|
|
16
|
+
const stateJson = JSON.stringify(state ?? {});
|
|
17
|
+
return text
|
|
18
|
+
.replaceAll('{{channel}}', channel)
|
|
19
|
+
.replaceAll('{{channelId}}', channelId)
|
|
20
|
+
.replaceAll('{{state}}', stateJson);
|
|
17
21
|
}
|
|
18
22
|
// ---------------------------------------------------------------------------
|
|
19
23
|
// Prompt body builder
|
|
@@ -29,8 +33,8 @@ export function expandCronPlaceholders(text, channel, channelId) {
|
|
|
29
33
|
* - Silent-mode sentinel instruction
|
|
30
34
|
*/
|
|
31
35
|
export function buildCronPromptBody(input) {
|
|
32
|
-
const { jobName, promptTemplate, channel, channelId = '', silent, routingMode, availableChannels, } = input;
|
|
33
|
-
const expandedPrompt = expandCronPlaceholders(promptTemplate, channel, channelId);
|
|
36
|
+
const { jobName, promptTemplate, channel, channelId = '', silent, routingMode, availableChannels, state, } = input;
|
|
37
|
+
const expandedPrompt = expandCronPlaceholders(promptTemplate, channel, channelId, state);
|
|
34
38
|
const segments = [
|
|
35
39
|
`You are executing a scheduled cron job named "${jobName}".`,
|
|
36
40
|
`Instruction: ${expandedPrompt}`,
|
|
@@ -49,6 +53,24 @@ export function buildCronPromptBody(input) {
|
|
|
49
53
|
segments.push('IMPORTANT: If there is nothing actionable to report, respond with exactly `HEARTBEAT_OK` and nothing else.');
|
|
50
54
|
}
|
|
51
55
|
}
|
|
56
|
+
// Inject persistent state section when state is present and non-empty.
|
|
57
|
+
if (state && Object.keys(state).length > 0) {
|
|
58
|
+
const STATE_CHAR_LIMIT = 4000;
|
|
59
|
+
let serialized = JSON.stringify(state, null, 2);
|
|
60
|
+
if (serialized.length > STATE_CHAR_LIMIT) {
|
|
61
|
+
serialized = serialized.slice(0, STATE_CHAR_LIMIT) + '\n... (state truncated)';
|
|
62
|
+
}
|
|
63
|
+
segments.push([
|
|
64
|
+
'## Persistent State',
|
|
65
|
+
'',
|
|
66
|
+
'The following state was persisted from your previous run:',
|
|
67
|
+
'```json',
|
|
68
|
+
serialized,
|
|
69
|
+
'```',
|
|
70
|
+
'If you need to update the persisted state for the next run, emit a `<cron-state>{...}</cron-state>` block ' +
|
|
71
|
+
'containing a JSON object with the full updated state. The emitted object fully replaces the existing state — include all keys you want to keep. Only emit this block if the state needs to change.',
|
|
72
|
+
].join('\n'));
|
|
73
|
+
}
|
|
52
74
|
return segments.join('\n\n');
|
|
53
75
|
}
|
|
54
76
|
// ---------------------------------------------------------------------------
|
|
@@ -242,3 +242,81 @@ describe('buildCronPromptBody — placeholder expansion', () => {
|
|
|
242
242
|
expect(body).toContain('Channel alerts with ID ch-5.');
|
|
243
243
|
});
|
|
244
244
|
});
|
|
245
|
+
// ---------------------------------------------------------------------------
|
|
246
|
+
// expandCronPlaceholders — {{state}} placeholder
|
|
247
|
+
// ---------------------------------------------------------------------------
|
|
248
|
+
describe('expandCronPlaceholders — {{state}} placeholder', () => {
|
|
249
|
+
it('expands {{state}} to JSON string of provided state', () => {
|
|
250
|
+
const result = expandCronPlaceholders('Current state: {{state}}', 'general', 'ch-1', { counter: 3, lastSeen: '2026-02-28' });
|
|
251
|
+
expect(result).toContain('"counter":3');
|
|
252
|
+
expect(result).toContain('"lastSeen":"2026-02-28"');
|
|
253
|
+
expect(result).not.toContain('{{state}}');
|
|
254
|
+
});
|
|
255
|
+
it('expands {{state}} to empty object JSON when state is undefined', () => {
|
|
256
|
+
const result = expandCronPlaceholders('State: {{state}}', 'general', 'ch-1');
|
|
257
|
+
expect(result).toBe('State: {}');
|
|
258
|
+
});
|
|
259
|
+
it('expands {{state}} to empty object JSON when state is empty', () => {
|
|
260
|
+
const result = expandCronPlaceholders('State: {{state}}', 'general', 'ch-1', {});
|
|
261
|
+
expect(result).toBe('State: {}');
|
|
262
|
+
});
|
|
263
|
+
});
|
|
264
|
+
// ---------------------------------------------------------------------------
|
|
265
|
+
// buildCronPromptBody — persistent state
|
|
266
|
+
// ---------------------------------------------------------------------------
|
|
267
|
+
describe('buildCronPromptBody — persistent state', () => {
|
|
268
|
+
it('includes Persistent State section when state is non-empty', () => {
|
|
269
|
+
const body = buildCronPromptBody({
|
|
270
|
+
jobName: 'Stateful Job',
|
|
271
|
+
promptTemplate: 'Check for updates.',
|
|
272
|
+
channel: 'general',
|
|
273
|
+
state: { lastCheck: '2026-02-27', items: [1, 2, 3] },
|
|
274
|
+
});
|
|
275
|
+
expect(body).toContain('## Persistent State');
|
|
276
|
+
expect(body).toContain('"lastCheck": "2026-02-27"');
|
|
277
|
+
expect(body).toContain('<cron-state>');
|
|
278
|
+
});
|
|
279
|
+
it('omits Persistent State section when state is undefined', () => {
|
|
280
|
+
const body = buildCronPromptBody({
|
|
281
|
+
jobName: 'Stateless Job',
|
|
282
|
+
promptTemplate: 'Say hello.',
|
|
283
|
+
channel: 'general',
|
|
284
|
+
});
|
|
285
|
+
expect(body).not.toContain('Persistent State');
|
|
286
|
+
expect(body).not.toContain('<cron-state>');
|
|
287
|
+
});
|
|
288
|
+
it('omits Persistent State section when state is empty object', () => {
|
|
289
|
+
const body = buildCronPromptBody({
|
|
290
|
+
jobName: 'Empty State Job',
|
|
291
|
+
promptTemplate: 'Say hello.',
|
|
292
|
+
channel: 'general',
|
|
293
|
+
state: {},
|
|
294
|
+
});
|
|
295
|
+
expect(body).not.toContain('Persistent State');
|
|
296
|
+
expect(body).not.toContain('<cron-state>');
|
|
297
|
+
});
|
|
298
|
+
it('truncates very large state objects', () => {
|
|
299
|
+
const largeState = {};
|
|
300
|
+
for (let i = 0; i < 500; i++) {
|
|
301
|
+
largeState[`key_${i}`] = 'x'.repeat(20);
|
|
302
|
+
}
|
|
303
|
+
const body = buildCronPromptBody({
|
|
304
|
+
jobName: 'Large State Job',
|
|
305
|
+
promptTemplate: 'Process data.',
|
|
306
|
+
channel: 'general',
|
|
307
|
+
state: largeState,
|
|
308
|
+
});
|
|
309
|
+
expect(body).toContain('## Persistent State');
|
|
310
|
+
expect(body).toContain('(state truncated)');
|
|
311
|
+
});
|
|
312
|
+
it('includes cron-state emit instruction in the state section', () => {
|
|
313
|
+
const body = buildCronPromptBody({
|
|
314
|
+
jobName: 'Job',
|
|
315
|
+
promptTemplate: 'Do stuff.',
|
|
316
|
+
channel: 'general',
|
|
317
|
+
state: { v: 1 },
|
|
318
|
+
});
|
|
319
|
+
expect(body).toContain('<cron-state>');
|
|
320
|
+
expect(body).toContain('update');
|
|
321
|
+
});
|
|
322
|
+
});
|
package/dist/cron/executor.js
CHANGED
|
@@ -20,6 +20,50 @@ async function recordError(ctx, job, msg) {
|
|
|
20
20
|
}
|
|
21
21
|
}
|
|
22
22
|
}
|
|
23
|
+
const MAX_CHAIN_DEPTH = 10;
|
|
24
|
+
export async function fireChainedJobs(cronId, ctx) {
|
|
25
|
+
const chainDepth = ctx.chainDepth ?? 0;
|
|
26
|
+
if (chainDepth >= MAX_CHAIN_DEPTH) {
|
|
27
|
+
ctx.log?.warn({ cronId, chainDepth }, 'chain:depth limit reached, skipping downstream');
|
|
28
|
+
return;
|
|
29
|
+
}
|
|
30
|
+
if (!ctx.statsStore || !ctx.getSchedulerJob)
|
|
31
|
+
return;
|
|
32
|
+
const record = ctx.statsStore.getRecord(cronId);
|
|
33
|
+
if (!record?.chain || record.chain.length === 0)
|
|
34
|
+
return;
|
|
35
|
+
const upstreamState = record.state;
|
|
36
|
+
for (const downstreamCronId of record.chain) {
|
|
37
|
+
const downstreamRecord = ctx.statsStore.getRecord(downstreamCronId);
|
|
38
|
+
if (!downstreamRecord) {
|
|
39
|
+
ctx.log?.warn({ cronId, downstream: downstreamCronId }, 'chain:downstream record not found, skipping');
|
|
40
|
+
continue;
|
|
41
|
+
}
|
|
42
|
+
const downstreamJob = ctx.getSchedulerJob(downstreamRecord.threadId);
|
|
43
|
+
if (!downstreamJob) {
|
|
44
|
+
ctx.log?.warn({ cronId, downstream: downstreamCronId, threadId: downstreamRecord.threadId }, 'chain:downstream scheduler job not found, skipping');
|
|
45
|
+
continue;
|
|
46
|
+
}
|
|
47
|
+
// Merge __upstream into downstream job's persisted state so the prompt's {{state}} includes handoff data.
|
|
48
|
+
try {
|
|
49
|
+
const forwardedState = {
|
|
50
|
+
...(downstreamRecord.state ?? {}),
|
|
51
|
+
__upstream: { fromCronId: cronId, state: upstreamState ?? {} },
|
|
52
|
+
};
|
|
53
|
+
await ctx.statsStore.upsertRecord(downstreamCronId, downstreamRecord.threadId, { state: forwardedState });
|
|
54
|
+
ctx.log?.info({ cronId, downstream: downstreamCronId }, 'chain:state forwarded');
|
|
55
|
+
}
|
|
56
|
+
catch (err) {
|
|
57
|
+
ctx.log?.warn({ err, cronId, downstream: downstreamCronId }, 'chain:state forward failed');
|
|
58
|
+
}
|
|
59
|
+
// Fire-and-forget with incremented chain depth.
|
|
60
|
+
const downstreamCtx = { ...ctx, chainDepth: chainDepth + 1 };
|
|
61
|
+
void executeCronJob(downstreamJob, downstreamCtx).catch((err) => {
|
|
62
|
+
ctx.log?.warn({ err, cronId, downstream: downstreamCronId }, 'chain:downstream execution failed');
|
|
63
|
+
});
|
|
64
|
+
ctx.log?.info({ cronId, downstream: downstreamCronId }, 'chain:downstream fired');
|
|
65
|
+
}
|
|
66
|
+
}
|
|
23
67
|
export async function executeCronJob(job, ctx) {
|
|
24
68
|
const metrics = globalMetrics;
|
|
25
69
|
let cancelRequested = false;
|
|
@@ -110,6 +154,7 @@ export async function executeCronJob(job, ctx) {
|
|
|
110
154
|
channelId: channelForSend.id,
|
|
111
155
|
silent: preRunRecord?.silent,
|
|
112
156
|
routingMode: preRunRecord?.routingMode === 'json' ? 'json' : undefined,
|
|
157
|
+
state: preRunRecord?.state,
|
|
113
158
|
});
|
|
114
159
|
const tools = await resolveEffectiveTools({
|
|
115
160
|
workspaceCwd: ctx.cwd,
|
|
@@ -208,7 +253,31 @@ export async function executeCronJob(job, ctx) {
|
|
|
208
253
|
}
|
|
209
254
|
metrics.recordInvokeResult('cron', Date.now() - t0, true);
|
|
210
255
|
ctx.log?.info({ flow: 'cron', jobId: job.id, ms: Date.now() - t0, ok: true }, 'obs.invoke.end');
|
|
211
|
-
|
|
256
|
+
let output = finalText || deltaText;
|
|
257
|
+
// Extract <cron-state> blocks from the output — last one wins.
|
|
258
|
+
const cronStateRegex = /<cron-state>([\s\S]*?)<\/cron-state>/g;
|
|
259
|
+
let cronStateMatch;
|
|
260
|
+
let lastCronStateJson;
|
|
261
|
+
while ((cronStateMatch = cronStateRegex.exec(output)) !== null) {
|
|
262
|
+
lastCronStateJson = cronStateMatch[1];
|
|
263
|
+
}
|
|
264
|
+
if (lastCronStateJson !== undefined && ctx.statsStore && job.cronId) {
|
|
265
|
+
try {
|
|
266
|
+
const parsedState = JSON.parse(lastCronStateJson.trim());
|
|
267
|
+
if (parsedState && typeof parsedState === 'object' && !Array.isArray(parsedState)) {
|
|
268
|
+
await ctx.statsStore.upsertRecord(job.cronId, job.threadId, { state: parsedState });
|
|
269
|
+
ctx.log?.info({ jobId: job.id, cronId: job.cronId }, 'cron:exec persisted updated state');
|
|
270
|
+
}
|
|
271
|
+
else {
|
|
272
|
+
ctx.log?.warn({ jobId: job.id, cronId: job.cronId }, 'cron:exec <cron-state> was not a JSON object, ignoring');
|
|
273
|
+
}
|
|
274
|
+
}
|
|
275
|
+
catch (stateErr) {
|
|
276
|
+
ctx.log?.warn({ err: stateErr, jobId: job.id, cronId: job.cronId }, 'cron:exec <cron-state> parse failed, ignoring');
|
|
277
|
+
}
|
|
278
|
+
// Strip all <cron-state> blocks from the output text.
|
|
279
|
+
output = output.replace(/<cron-state>[\s\S]*?<\/cron-state>/g, '').trim();
|
|
280
|
+
}
|
|
212
281
|
if (!output.trim() && collectedImages.length === 0) {
|
|
213
282
|
metrics.increment('cron.run.skipped');
|
|
214
283
|
ctx.log?.warn({ jobId: job.id }, 'cron:exec empty output');
|
|
@@ -219,6 +288,7 @@ export async function executeCronJob(job, ctx) {
|
|
|
219
288
|
catch {
|
|
220
289
|
// Best-effort.
|
|
221
290
|
}
|
|
291
|
+
void fireChainedJobs(job.cronId, ctx);
|
|
222
292
|
}
|
|
223
293
|
return;
|
|
224
294
|
}
|
|
@@ -310,6 +380,7 @@ export async function executeCronJob(job, ctx) {
|
|
|
310
380
|
catch {
|
|
311
381
|
// Best-effort.
|
|
312
382
|
}
|
|
383
|
+
void fireChainedJobs(job.cronId, ctx);
|
|
313
384
|
}
|
|
314
385
|
metrics.increment('cron.run.success');
|
|
315
386
|
return;
|
|
@@ -326,6 +397,7 @@ export async function executeCronJob(job, ctx) {
|
|
|
326
397
|
catch {
|
|
327
398
|
// Best-effort.
|
|
328
399
|
}
|
|
400
|
+
void fireChainedJobs(job.cronId, ctx);
|
|
329
401
|
}
|
|
330
402
|
metrics.increment('cron.run.success');
|
|
331
403
|
return;
|
|
@@ -366,6 +438,7 @@ export async function executeCronJob(job, ctx) {
|
|
|
366
438
|
catch (statsErr) {
|
|
367
439
|
ctx.log?.warn({ err: statsErr, jobId: job.id }, 'cron:exec stats record failed');
|
|
368
440
|
}
|
|
441
|
+
void fireChainedJobs(job.cronId, ctx);
|
|
369
442
|
}
|
|
370
443
|
}
|
|
371
444
|
catch (err) {
|
|
@@ -1003,3 +1003,105 @@ describe('executeCronJob allowedActions filtering', () => {
|
|
|
1003
1003
|
executeDiscordActionsSpy.mockRestore();
|
|
1004
1004
|
});
|
|
1005
1005
|
});
|
|
1006
|
+
// ---------------------------------------------------------------------------
|
|
1007
|
+
// <cron-state> extraction and persistence
|
|
1008
|
+
// ---------------------------------------------------------------------------
|
|
1009
|
+
describe('executeCronJob cron-state extraction', () => {
|
|
1010
|
+
let statsDir;
|
|
1011
|
+
beforeEach(async () => {
|
|
1012
|
+
statsDir = await fs.mkdtemp(path.join(os.tmpdir(), 'executor-cron-state-'));
|
|
1013
|
+
});
|
|
1014
|
+
afterEach(async () => {
|
|
1015
|
+
await fs.rm(statsDir, { recursive: true, force: true });
|
|
1016
|
+
});
|
|
1017
|
+
it('extracts <cron-state> block and persists state via upsertRecord', async () => {
|
|
1018
|
+
const statsPath = path.join(statsDir, 'stats.json');
|
|
1019
|
+
const statsStore = await loadRunStats(statsPath);
|
|
1020
|
+
await statsStore.upsertRecord('cron-test0001', 'thread-1');
|
|
1021
|
+
const response = 'Here is the report.\n<cron-state>{"lastSeen":"2026-02-28","count":5}</cron-state>';
|
|
1022
|
+
const ctx = makeCtx({ statsStore, runtime: makeMockRuntime(response) });
|
|
1023
|
+
const job = makeJob();
|
|
1024
|
+
await executeCronJob(job, ctx);
|
|
1025
|
+
const rec = statsStore.getRecord('cron-test0001');
|
|
1026
|
+
expect(rec.state).toEqual({ lastSeen: '2026-02-28', count: 5 });
|
|
1027
|
+
});
|
|
1028
|
+
it('strips <cron-state> blocks from the output sent to Discord', async () => {
|
|
1029
|
+
const statsPath = path.join(statsDir, 'stats.json');
|
|
1030
|
+
const statsStore = await loadRunStats(statsPath);
|
|
1031
|
+
await statsStore.upsertRecord('cron-test0001', 'thread-1');
|
|
1032
|
+
const response = 'Report content here.\n<cron-state>{"v":1}</cron-state>';
|
|
1033
|
+
const ctx = makeCtx({ statsStore, runtime: makeMockRuntime(response) });
|
|
1034
|
+
const job = makeJob();
|
|
1035
|
+
await executeCronJob(job, ctx);
|
|
1036
|
+
const guild = ctx.client.guilds.cache.get('guild-1');
|
|
1037
|
+
const channel = guild.channels.cache.get('general');
|
|
1038
|
+
expect(channel.send).toHaveBeenCalled();
|
|
1039
|
+
const sentContent = channel.send.mock.calls[0][0].content;
|
|
1040
|
+
expect(sentContent).not.toContain('<cron-state>');
|
|
1041
|
+
expect(sentContent).toContain('Report content here.');
|
|
1042
|
+
});
|
|
1043
|
+
it('uses the last <cron-state> block when multiple are present', async () => {
|
|
1044
|
+
const statsPath = path.join(statsDir, 'stats.json');
|
|
1045
|
+
const statsStore = await loadRunStats(statsPath);
|
|
1046
|
+
await statsStore.upsertRecord('cron-test0001', 'thread-1');
|
|
1047
|
+
const response = [
|
|
1048
|
+
'Part 1.',
|
|
1049
|
+
'<cron-state>{"v":1}</cron-state>',
|
|
1050
|
+
'Part 2.',
|
|
1051
|
+
'<cron-state>{"v":2,"final":true}</cron-state>',
|
|
1052
|
+
].join('\n');
|
|
1053
|
+
const ctx = makeCtx({ statsStore, runtime: makeMockRuntime(response) });
|
|
1054
|
+
const job = makeJob();
|
|
1055
|
+
await executeCronJob(job, ctx);
|
|
1056
|
+
const rec = statsStore.getRecord('cron-test0001');
|
|
1057
|
+
expect(rec.state).toEqual({ v: 2, final: true });
|
|
1058
|
+
});
|
|
1059
|
+
it('ignores invalid JSON in <cron-state> block gracefully', async () => {
|
|
1060
|
+
const statsPath = path.join(statsDir, 'stats.json');
|
|
1061
|
+
const statsStore = await loadRunStats(statsPath);
|
|
1062
|
+
await statsStore.upsertRecord('cron-test0001', 'thread-1', { state: { old: true } });
|
|
1063
|
+
const response = 'Output here.\n<cron-state>not valid json</cron-state>';
|
|
1064
|
+
const ctx = makeCtx({ statsStore, runtime: makeMockRuntime(response) });
|
|
1065
|
+
const job = makeJob();
|
|
1066
|
+
await executeCronJob(job, ctx);
|
|
1067
|
+
// State should remain unchanged after invalid parse.
|
|
1068
|
+
const rec = statsStore.getRecord('cron-test0001');
|
|
1069
|
+
expect(rec.state).toEqual({ old: true });
|
|
1070
|
+
expect(ctx.log?.warn).toHaveBeenCalledWith(expect.objectContaining({ jobId: 'thread-1' }), 'cron:exec <cron-state> parse failed, ignoring');
|
|
1071
|
+
});
|
|
1072
|
+
it('ignores non-object JSON in <cron-state> block (e.g. array)', async () => {
|
|
1073
|
+
const statsPath = path.join(statsDir, 'stats.json');
|
|
1074
|
+
const statsStore = await loadRunStats(statsPath);
|
|
1075
|
+
await statsStore.upsertRecord('cron-test0001', 'thread-1');
|
|
1076
|
+
const response = 'Output here.\n<cron-state>[1,2,3]</cron-state>';
|
|
1077
|
+
const ctx = makeCtx({ statsStore, runtime: makeMockRuntime(response) });
|
|
1078
|
+
const job = makeJob();
|
|
1079
|
+
await executeCronJob(job, ctx);
|
|
1080
|
+
const rec = statsStore.getRecord('cron-test0001');
|
|
1081
|
+
expect(rec.state).toBeUndefined();
|
|
1082
|
+
expect(ctx.log?.warn).toHaveBeenCalledWith(expect.objectContaining({ jobId: 'thread-1' }), 'cron:exec <cron-state> was not a JSON object, ignoring');
|
|
1083
|
+
});
|
|
1084
|
+
it('passes existing state to the prompt via preRunRecord', async () => {
|
|
1085
|
+
const statsPath = path.join(statsDir, 'stats.json');
|
|
1086
|
+
const statsStore = await loadRunStats(statsPath);
|
|
1087
|
+
await statsStore.upsertRecord('cron-test0001', 'thread-1', {
|
|
1088
|
+
state: { counter: 42, lastItem: 'xyz' },
|
|
1089
|
+
});
|
|
1090
|
+
let capturedPrompt = '';
|
|
1091
|
+
const runtime = {
|
|
1092
|
+
id: 'claude_code',
|
|
1093
|
+
capabilities: new Set(['streaming_text']),
|
|
1094
|
+
async *invoke(opts) {
|
|
1095
|
+
capturedPrompt = opts.prompt;
|
|
1096
|
+
yield { type: 'text_final', text: 'Done.' };
|
|
1097
|
+
yield { type: 'done' };
|
|
1098
|
+
},
|
|
1099
|
+
};
|
|
1100
|
+
const ctx = makeCtx({ statsStore, runtime });
|
|
1101
|
+
const job = makeJob();
|
|
1102
|
+
await executeCronJob(job, ctx);
|
|
1103
|
+
expect(capturedPrompt).toContain('Persistent State');
|
|
1104
|
+
expect(capturedPrompt).toContain('"counter": 42');
|
|
1105
|
+
expect(capturedPrompt).toContain('"lastItem": "xyz"');
|
|
1106
|
+
});
|
|
1107
|
+
});
|
package/dist/cron/run-stats.js
CHANGED
|
@@ -5,7 +5,7 @@ import crypto from 'node:crypto';
|
|
|
5
5
|
// Types
|
|
6
6
|
// ---------------------------------------------------------------------------
|
|
7
7
|
export const CADENCE_TAGS = ['yearly', 'frequent', 'hourly', 'daily', 'weekly', 'monthly'];
|
|
8
|
-
export const CURRENT_VERSION =
|
|
8
|
+
export const CURRENT_VERSION = 10;
|
|
9
9
|
// ---------------------------------------------------------------------------
|
|
10
10
|
// Stable Cron ID generation
|
|
11
11
|
// ---------------------------------------------------------------------------
|
|
@@ -135,6 +135,9 @@ export class CronRunStats {
|
|
|
135
135
|
if ('allowedActions' in updates && updates.allowedActions === undefined) {
|
|
136
136
|
delete existing.allowedActions;
|
|
137
137
|
}
|
|
138
|
+
if ('chain' in updates && updates.chain === undefined) {
|
|
139
|
+
delete existing.chain;
|
|
140
|
+
}
|
|
138
141
|
}
|
|
139
142
|
existing.threadId = threadId;
|
|
140
143
|
if (prevStatusMessageId && prevStatusMessageId !== existing.statusMessageId) {
|
|
@@ -317,5 +320,13 @@ export async function loadRunStats(filePath) {
|
|
|
317
320
|
if (store.version === 7) {
|
|
318
321
|
store.version = 8;
|
|
319
322
|
}
|
|
323
|
+
// Migrate v8 → v9: no-op — new field (state) is optional and defaults to absent.
|
|
324
|
+
if (store.version === 8) {
|
|
325
|
+
store.version = 9;
|
|
326
|
+
}
|
|
327
|
+
// Migrate v9 → v10: no-op — new field (chain) is optional and defaults to absent.
|
|
328
|
+
if (store.version === 9) {
|
|
329
|
+
store.version = 10;
|
|
330
|
+
}
|
|
320
331
|
return new CronRunStats(store, filePath);
|
|
321
332
|
}
|
|
@@ -38,7 +38,7 @@ describe('CronRunStats', () => {
|
|
|
38
38
|
it('creates empty store on missing file', async () => {
|
|
39
39
|
const stats = await loadRunStats(statsPath);
|
|
40
40
|
const store = stats.getStore();
|
|
41
|
-
expect(store.version).toBe(
|
|
41
|
+
expect(store.version).toBe(10);
|
|
42
42
|
expect(Object.keys(store.jobs)).toHaveLength(0);
|
|
43
43
|
});
|
|
44
44
|
it('upserts and retrieves records by cronId', async () => {
|
|
@@ -92,6 +92,37 @@ describe('CronRunStats', () => {
|
|
|
92
92
|
expect(rec.allowedActions).toBeUndefined();
|
|
93
93
|
expect('allowedActions' in rec).toBe(false);
|
|
94
94
|
});
|
|
95
|
+
it('upserts with state and retrieves it', async () => {
|
|
96
|
+
const stats = await loadRunStats(statsPath);
|
|
97
|
+
const stateObj = { lastSeen: '2026-02-28', counter: 5 };
|
|
98
|
+
const rec = await stats.upsertRecord('cron-st1', 'thread-st1', { state: stateObj });
|
|
99
|
+
expect(rec.state).toEqual(stateObj);
|
|
100
|
+
const fetched = stats.getRecord('cron-st1');
|
|
101
|
+
expect(fetched.state).toEqual(stateObj);
|
|
102
|
+
});
|
|
103
|
+
it('persists state through disk reload', async () => {
|
|
104
|
+
const stats = await loadRunStats(statsPath);
|
|
105
|
+
const stateObj = { items: ['a', 'b'], processed: true };
|
|
106
|
+
await stats.upsertRecord('cron-st2', 'thread-st2', { state: stateObj });
|
|
107
|
+
const stats2 = await loadRunStats(statsPath);
|
|
108
|
+
const rec = stats2.getRecord('cron-st2');
|
|
109
|
+
expect(rec).toBeDefined();
|
|
110
|
+
expect(rec.state).toEqual(stateObj);
|
|
111
|
+
});
|
|
112
|
+
it('replaces state entirely on subsequent upsert', async () => {
|
|
113
|
+
const stats = await loadRunStats(statsPath);
|
|
114
|
+
await stats.upsertRecord('cron-st3', 'thread-st3', { state: { v: 1, old: true } });
|
|
115
|
+
await stats.upsertRecord('cron-st3', 'thread-st3', { state: { v: 2, new: true } });
|
|
116
|
+
const rec = stats.getRecord('cron-st3');
|
|
117
|
+
expect(rec.state).toEqual({ v: 2, new: true });
|
|
118
|
+
expect(rec.state).not.toHaveProperty('old');
|
|
119
|
+
});
|
|
120
|
+
it('defaults to no state when none is provided', async () => {
|
|
121
|
+
const stats = await loadRunStats(statsPath);
|
|
122
|
+
await stats.upsertRecord('cron-st4', 'thread-st4');
|
|
123
|
+
const rec = stats.getRecord('cron-st4');
|
|
124
|
+
expect(rec.state).toBeUndefined();
|
|
125
|
+
});
|
|
95
126
|
it('retrieves records by threadId', async () => {
|
|
96
127
|
const stats = await loadRunStats(statsPath);
|
|
97
128
|
await stats.upsertRecord('cron-a', 'thread-100');
|
|
@@ -244,7 +275,7 @@ describe('CronRunStats', () => {
|
|
|
244
275
|
describe('emptyStore', () => {
|
|
245
276
|
it('returns valid initial structure', () => {
|
|
246
277
|
const store = emptyStore();
|
|
247
|
-
expect(store.version).toBe(
|
|
278
|
+
expect(store.version).toBe(10);
|
|
248
279
|
expect(store.updatedAt).toBeGreaterThan(0);
|
|
249
280
|
expect(Object.keys(store.jobs)).toHaveLength(0);
|
|
250
281
|
});
|
|
@@ -271,7 +302,7 @@ describe('loadRunStats version migration', () => {
|
|
|
271
302
|
};
|
|
272
303
|
await fs.writeFile(statsPath, JSON.stringify(v3Store), 'utf-8');
|
|
273
304
|
const stats = await loadRunStats(statsPath);
|
|
274
|
-
expect(stats.getStore().version).toBe(
|
|
305
|
+
expect(stats.getStore().version).toBe(10);
|
|
275
306
|
const rec = stats.getRecord('cron-migrated');
|
|
276
307
|
expect(rec).toBeDefined();
|
|
277
308
|
expect(rec.cronId).toBe('cron-migrated');
|
|
@@ -301,7 +332,7 @@ describe('loadRunStats version migration', () => {
|
|
|
301
332
|
};
|
|
302
333
|
await fs.writeFile(statsPath, JSON.stringify(v4Store), 'utf-8');
|
|
303
334
|
const stats = await loadRunStats(statsPath);
|
|
304
|
-
expect(stats.getStore().version).toBe(
|
|
335
|
+
expect(stats.getStore().version).toBe(10);
|
|
305
336
|
const rec = stats.getRecord('cron-v4');
|
|
306
337
|
expect(rec).toBeDefined();
|
|
307
338
|
expect(rec.cronId).toBe('cron-v4');
|
|
@@ -331,7 +362,7 @@ describe('loadRunStats version migration', () => {
|
|
|
331
362
|
};
|
|
332
363
|
await fs.writeFile(statsPath, JSON.stringify(v5Store), 'utf-8');
|
|
333
364
|
const stats = await loadRunStats(statsPath);
|
|
334
|
-
expect(stats.getStore().version).toBe(
|
|
365
|
+
expect(stats.getStore().version).toBe(10);
|
|
335
366
|
const rec = stats.getRecord('cron-v5');
|
|
336
367
|
expect(rec).toBeDefined();
|
|
337
368
|
expect(rec.cronId).toBe('cron-v5');
|
|
@@ -344,6 +375,37 @@ describe('loadRunStats version migration', () => {
|
|
|
344
375
|
expect(rec.prompt).toBeUndefined();
|
|
345
376
|
expect(rec.authorId).toBeUndefined();
|
|
346
377
|
});
|
|
378
|
+
it('migrates a v8 store to v9 with state undefined on existing records', async () => {
|
|
379
|
+
const v8Store = {
|
|
380
|
+
version: 8,
|
|
381
|
+
updatedAt: Date.now(),
|
|
382
|
+
jobs: {
|
|
383
|
+
'cron-v8': {
|
|
384
|
+
cronId: 'cron-v8',
|
|
385
|
+
threadId: 'thread-v8',
|
|
386
|
+
runCount: 4,
|
|
387
|
+
lastRunAt: '2026-02-01T00:00:00.000Z',
|
|
388
|
+
lastRunStatus: 'success',
|
|
389
|
+
cadence: 'daily',
|
|
390
|
+
purposeTags: ['delta'],
|
|
391
|
+
disabled: false,
|
|
392
|
+
model: 'sonnet',
|
|
393
|
+
triggerType: 'schedule',
|
|
394
|
+
silent: true,
|
|
395
|
+
routingMode: 'default',
|
|
396
|
+
channel: 'general',
|
|
397
|
+
},
|
|
398
|
+
},
|
|
399
|
+
};
|
|
400
|
+
await fs.writeFile(statsPath, JSON.stringify(v8Store), 'utf-8');
|
|
401
|
+
const stats = await loadRunStats(statsPath);
|
|
402
|
+
expect(stats.getStore().version).toBe(10);
|
|
403
|
+
const rec = stats.getRecord('cron-v8');
|
|
404
|
+
expect(rec).toBeDefined();
|
|
405
|
+
expect(rec.cronId).toBe('cron-v8');
|
|
406
|
+
expect(rec.runCount).toBe(4);
|
|
407
|
+
expect(rec.state).toBeUndefined();
|
|
408
|
+
});
|
|
347
409
|
it('migrates a v6 store to v7 with routingMode and allowedActions undefined on existing records', async () => {
|
|
348
410
|
const v6Store = {
|
|
349
411
|
version: 6,
|
|
@@ -367,7 +429,7 @@ describe('loadRunStats version migration', () => {
|
|
|
367
429
|
};
|
|
368
430
|
await fs.writeFile(statsPath, JSON.stringify(v6Store), 'utf-8');
|
|
369
431
|
const stats = await loadRunStats(statsPath);
|
|
370
|
-
expect(stats.getStore().version).toBe(
|
|
432
|
+
expect(stats.getStore().version).toBe(10);
|
|
371
433
|
const rec = stats.getRecord('cron-v6');
|
|
372
434
|
expect(rec).toBeDefined();
|
|
373
435
|
expect(rec.cronId).toBe('cron-v6');
|
|
@@ -89,6 +89,53 @@ function requestRunningJobCancel(cronCtx, threadId, cronId) {
|
|
|
89
89
|
}
|
|
90
90
|
return canceled;
|
|
91
91
|
}
|
|
92
|
+
/**
 * Parse and validate a comma-separated chain string. Returns parsed cronIds or an error message.
 *
 * @param {string} chainStr - Comma-separated cronIds; the empty string is the explicit
 *   "no chain / clear the chain" sentinel and yields `{ ids: [] }`.
 * @param {{ getRecord: (id: string) => unknown }} statsStore - Store used to verify each cronId exists.
 * @param {string} [selfCronId] - When provided, the chain may not reference this id (no self-chaining).
 * @returns {{ ids: string[] } | { error: string }} Parsed ids on success, or a human-readable error.
 */
function parseAndValidateChain(chainStr, statsStore, selfCronId) {
    if (chainStr === '') {
        return { ids: [] };
    }
    // Split on commas, trim whitespace, and drop empty segments (e.g. trailing commas).
    const rawIds = chainStr.split(',').map((s) => s.trim()).filter(Boolean);
    if (rawIds.length === 0) {
        return { error: 'chain requires at least one cronId if provided' };
    }
    // De-duplicate while preserving first-seen order: a repeated cronId would
    // otherwise trigger the same downstream job more than once per run.
    const ids = [...new Set(rawIds)];
    // Validate each cronId exists.
    const missing = ids.filter((id) => !statsStore.getRecord(id));
    if (missing.length > 0) {
        return { error: `chain contains unknown cronIds: ${missing.join(', ')}` };
    }
    // No self-referencing.
    if (selfCronId && ids.includes(selfCronId)) {
        return { error: 'chain cannot reference itself' };
    }
    return { ids };
}
|
|
114
|
+
/**
 * Detect cycles in the chain graph. Returns true if adding the proposed chain
 * to `cronId` would create a cycle.
 *
 * @param {string} cronId - The job the proposed chain would be attached to.
 * @param {string[]} proposedChain - Downstream cronIds being proposed.
 * @param {{ getRecord: (id: string) => { chain?: string[] } | undefined }} statsStore
 * @returns {boolean} True when `cronId` is reachable from any proposed downstream job.
 */
function detectChainCycle(cronId, proposedChain, statsStore) {
    // Depth-first reachability over existing chain edges: if any proposed
    // downstream job can reach `cronId`, the new edges would close a loop.
    const seen = new Set();
    const stack = [...proposedChain];
    while (stack.length > 0) {
        const node = stack.pop();
        if (node === cronId) {
            return true;
        }
        if (seen.has(node)) {
            continue;
        }
        seen.add(node);
        // Follow this job's own downstream chain, if it has one.
        const record = statsStore.getRecord(node);
        for (const downstream of record?.chain ?? []) {
            if (!seen.has(downstream)) {
                stack.push(downstream);
            }
        }
    }
    return false;
}
|
|
92
139
|
// ---------------------------------------------------------------------------
|
|
93
140
|
// Executor
|
|
94
141
|
// ---------------------------------------------------------------------------
|
|
@@ -119,6 +166,19 @@ export async function executeCronAction(action, ctx, cronCtx) {
|
|
|
119
166
|
}
|
|
120
167
|
parsedAllowedActions = parts;
|
|
121
168
|
}
|
|
169
|
+
// Validate chain if provided.
|
|
170
|
+
let parsedChain;
|
|
171
|
+
if (action.chain !== undefined) {
|
|
172
|
+
const chainResult = parseAndValidateChain(action.chain, cronCtx.statsStore);
|
|
173
|
+
if ('error' in chainResult) {
|
|
174
|
+
return { ok: false, error: chainResult.error };
|
|
175
|
+
}
|
|
176
|
+
if (chainResult.ids.length > 0) {
|
|
177
|
+
// No cycle detection needed on create — this job doesn't exist yet so
|
|
178
|
+
// no other job can reference it as a downstream target.
|
|
179
|
+
parsedChain = chainResult.ids;
|
|
180
|
+
}
|
|
181
|
+
}
|
|
122
182
|
// Create forum thread.
|
|
123
183
|
const forum = await resolveForumChannel(cronCtx.client, cronCtx.forumId);
|
|
124
184
|
if (!forum) {
|
|
@@ -209,6 +269,7 @@ export async function executeCronAction(action, ctx, cronCtx) {
|
|
|
209
269
|
authorId: cronCtx.client.user?.id,
|
|
210
270
|
...(action.routingMode ? { routingMode: action.routingMode } : {}),
|
|
211
271
|
...(parsedAllowedActions !== undefined && { allowedActions: parsedAllowedActions }),
|
|
272
|
+
...(parsedChain !== undefined && { chain: parsedChain }),
|
|
212
273
|
});
|
|
213
274
|
// Create status message.
|
|
214
275
|
try {
|
|
@@ -232,7 +293,7 @@ export async function executeCronAction(action, ctx, cronCtx) {
|
|
|
232
293
|
cronCtx.log?.warn({ err, cronId }, 'cron:action:create prompt message failed');
|
|
233
294
|
}
|
|
234
295
|
cronCtx.forumCountSync?.requestUpdate();
|
|
235
|
-
return { ok: true, summary: `Cron "${action.name}" created (${cronId}), schedule: ${action.schedule}, model: ${model}${action.routingMode ? `, routing: ${action.routingMode}` : ''}` };
|
|
296
|
+
return { ok: true, summary: `Cron "${action.name}" created (${cronId}), schedule: ${action.schedule}, model: ${model}${action.routingMode ? `, routing: ${action.routingMode}` : ''}${parsedChain ? `, chain: ${parsedChain.join(', ')}` : ''}` };
|
|
236
297
|
}
|
|
237
298
|
case 'cronUpdate': {
|
|
238
299
|
if (!action.cronId) {
|
|
@@ -290,6 +351,39 @@ export async function executeCronAction(action, ctx, cronCtx) {
|
|
|
290
351
|
changes.push(`allowedActions → ${parts.join(', ')}`);
|
|
291
352
|
}
|
|
292
353
|
}
|
|
354
|
+
// Chain override.
|
|
355
|
+
if (action.chain !== undefined) {
|
|
356
|
+
if (action.chain === '') {
|
|
357
|
+
updates.chain = undefined;
|
|
358
|
+
changes.push('chain cleared');
|
|
359
|
+
}
|
|
360
|
+
else {
|
|
361
|
+
const chainResult = parseAndValidateChain(action.chain, cronCtx.statsStore, action.cronId);
|
|
362
|
+
if ('error' in chainResult) {
|
|
363
|
+
return { ok: false, error: chainResult.error };
|
|
364
|
+
}
|
|
365
|
+
if (detectChainCycle(action.cronId, chainResult.ids, cronCtx.statsStore)) {
|
|
366
|
+
return { ok: false, error: 'chain would create a cycle' };
|
|
367
|
+
}
|
|
368
|
+
updates.chain = chainResult.ids;
|
|
369
|
+
changes.push(`chain → ${chainResult.ids.join(', ')}`);
|
|
370
|
+
}
|
|
371
|
+
}
|
|
372
|
+
// State override (manual JSON manipulation).
|
|
373
|
+
if (action.state !== undefined) {
|
|
374
|
+
let parsed;
|
|
375
|
+
try {
|
|
376
|
+
parsed = JSON.parse(action.state);
|
|
377
|
+
}
|
|
378
|
+
catch {
|
|
379
|
+
return { ok: false, error: 'state must be valid JSON' };
|
|
380
|
+
}
|
|
381
|
+
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
|
|
382
|
+
return { ok: false, error: 'state must be a JSON object' };
|
|
383
|
+
}
|
|
384
|
+
updates.state = parsed;
|
|
385
|
+
changes.push('state updated');
|
|
386
|
+
}
|
|
293
387
|
// Definition changes (schedule, timezone, channel, prompt).
|
|
294
388
|
const newSchedule = action.schedule ?? job.def.schedule ?? '';
|
|
295
389
|
const newTimezone = action.timezone ?? job.def.timezone;
|
|
@@ -446,7 +540,8 @@ export async function executeCronAction(action, ctx, cronCtx) {
|
|
|
446
540
|
const tags = record?.purposeTags?.join(', ') || '';
|
|
447
541
|
const nextRun = j.nextRun ? `<t:${Math.floor(j.nextRun.getTime() / 1000)}:R>` : 'N/A';
|
|
448
542
|
const cronId = fullJob?.cronId ?? '?';
|
|
449
|
-
|
|
543
|
+
const chained = record?.chain && record.chain.length > 0 ? ' | chained' : '';
|
|
544
|
+
return `\`${cronId}\` **${j.name}** | \`${j.schedule}\` | ${displayStatus} | ${model} | ${runs} runs | next: ${nextRun}${tags ? ` | ${tags}` : ''}${chained}`;
|
|
450
545
|
});
|
|
451
546
|
return { ok: true, summary: lines.join('\n') };
|
|
452
547
|
}
|
|
@@ -484,8 +579,20 @@ export async function executeCronAction(action, ctx, cronCtx) {
|
|
|
484
579
|
lines.push(`Tags: ${record.purposeTags.join(', ')}`);
|
|
485
580
|
if (record.allowedActions && record.allowedActions.length > 0)
|
|
486
581
|
lines.push(`Allowed actions: ${record.allowedActions.join(', ')}`);
|
|
582
|
+
if (record.chain && record.chain.length > 0) {
|
|
583
|
+
const chainEntries = record.chain.map((id) => {
|
|
584
|
+
const downstream = cronCtx.statsStore.getRecord(id);
|
|
585
|
+
const downstreamJob = downstream ? cronCtx.scheduler.getJob(downstream.threadId) : undefined;
|
|
586
|
+
return `\`${id}\`${downstreamJob ? ` (${downstreamJob.name})` : ''}`;
|
|
587
|
+
});
|
|
588
|
+
lines.push(`Chain: ${chainEntries.join(', ')}`);
|
|
589
|
+
}
|
|
487
590
|
if (record.lastErrorMessage)
|
|
488
591
|
lines.push(`Last error: ${record.lastErrorMessage}`);
|
|
592
|
+
if (record.state && Object.keys(record.state).length > 0) {
|
|
593
|
+
const stateJson = JSON.stringify(record.state);
|
|
594
|
+
lines.push(`State: ${stateJson.length > 500 ? stateJson.slice(0, 500) + '... (truncated)' : stateJson}`);
|
|
595
|
+
}
|
|
489
596
|
// Return full prompt text — prefer the persisted record prompt (always full),
|
|
490
597
|
// falling back to the scheduler def (also full).
|
|
491
598
|
const promptText = record.prompt ?? job?.def.prompt;
|
|
@@ -723,6 +830,7 @@ export function cronActionsPromptSection() {
|
|
|
723
830
|
- \`model\` (optional): "fast", "capable", or "deep" (auto-classified if omitted).
|
|
724
831
|
- \`routingMode\` (optional): Set to \`"json"\` to enable JSON routing mode. In this mode the executor uses the JSON router to dispatch structured responses. The prompt may contain \`{{channel}}\` and \`{{channelId}}\` placeholders which are expanded to the target channel name and ID at runtime.
|
|
725
832
|
- \`allowedActions\` (optional): Comma-separated list of Discord action types this job may emit (e.g., "cronList,cronShow"). Restricts the AI to only these action types during execution. Rejects unrecognized type names. Requires at least one entry if provided.
|
|
833
|
+
- \`chain\` (optional): Comma-separated cronIds of downstream jobs to trigger on successful completion (e.g., "cron-a1b2c3d4,cron-e5f6g7h8"). Creates a multi-step pipeline — the completed job's persisted state is forwarded to downstream jobs. Referenced cronIds must exist. Cycles are rejected.
|
|
726
834
|
|
|
727
835
|
**cronUpdate** — Update a cron's settings:
|
|
728
836
|
\`\`\`
|
|
@@ -733,6 +841,8 @@ export function cronActionsPromptSection() {
|
|
|
733
841
|
- \`silent\` (optional): Boolean. When true, suppresses short "nothing to report" responses.
|
|
734
842
|
- \`routingMode\` (optional): Set to \`"json"\` to enable JSON routing mode, or omit/pass empty string to clear.
|
|
735
843
|
- \`allowedActions\` (optional): Update the allowed action types list. Empty string clears the restriction.
|
|
844
|
+
- \`chain\` (optional): Update downstream pipeline jobs (comma-separated cronIds). Empty string clears the chain. Cycles are detected and rejected.
|
|
845
|
+
- \`state\` (optional): JSON string to replace the job's persistent state object (e.g., \`"{\\"cursor\\":\\"abc\\"}"\`). Must be a JSON object. Used for manual state manipulation; normally state is managed by the job itself.
|
|
736
846
|
|
|
737
847
|
**cronList** — List all cron jobs:
|
|
738
848
|
\`\`\`
|
|
@@ -663,4 +663,52 @@ describe('executeCronAction', () => {
|
|
|
663
663
|
expect(result.summary).not.toContain('Allowed actions:');
|
|
664
664
|
}
|
|
665
665
|
});
|
|
666
|
+
it('cronUpdate with valid state JSON persists it', async () => {
|
|
667
|
+
const cronCtx = makeCronCtx();
|
|
668
|
+
const result = await executeCronAction({ type: 'cronUpdate', cronId: 'cron-test0001', state: '{"cursor":"abc","count":5}' }, makeActionCtx(), cronCtx);
|
|
669
|
+
expect(result.ok).toBe(true);
|
|
670
|
+
expect(cronCtx.statsStore.upsertRecord).toHaveBeenCalledWith('cron-test0001', 'thread-1', expect.objectContaining({ state: { cursor: 'abc', count: 5 } }));
|
|
671
|
+
});
|
|
672
|
+
it('cronUpdate with invalid state JSON returns error', async () => {
|
|
673
|
+
const cronCtx = makeCronCtx();
|
|
674
|
+
const result = await executeCronAction({ type: 'cronUpdate', cronId: 'cron-test0001', state: 'not-json' }, makeActionCtx(), cronCtx);
|
|
675
|
+
expect(result.ok).toBe(false);
|
|
676
|
+
if (!result.ok)
|
|
677
|
+
expect(result.error).toContain('valid JSON');
|
|
678
|
+
});
|
|
679
|
+
it('cronUpdate with non-object state JSON returns error', async () => {
|
|
680
|
+
const cronCtx = makeCronCtx();
|
|
681
|
+
const result = await executeCronAction({ type: 'cronUpdate', cronId: 'cron-test0001', state: '[1,2,3]' }, makeActionCtx(), cronCtx);
|
|
682
|
+
expect(result.ok).toBe(false);
|
|
683
|
+
if (!result.ok)
|
|
684
|
+
expect(result.error).toContain('JSON object');
|
|
685
|
+
});
|
|
686
|
+
it('cronUpdate with empty object state clears state', async () => {
|
|
687
|
+
const cronCtx = makeCronCtx({
|
|
688
|
+
statsStore: makeStatsStore([makeRecord({ state: { old: 'value' } })]),
|
|
689
|
+
});
|
|
690
|
+
const result = await executeCronAction({ type: 'cronUpdate', cronId: 'cron-test0001', state: '{}' }, makeActionCtx(), cronCtx);
|
|
691
|
+
expect(result.ok).toBe(true);
|
|
692
|
+
expect(cronCtx.statsStore.upsertRecord).toHaveBeenCalledWith('cron-test0001', 'thread-1', expect.objectContaining({ state: {} }));
|
|
693
|
+
});
|
|
694
|
+
it('cronShow includes state when present', async () => {
|
|
695
|
+
const cronCtx = makeCronCtx({
|
|
696
|
+
statsStore: makeStatsStore([makeRecord({ state: { cursor: 'xyz', count: 10 } })]),
|
|
697
|
+
});
|
|
698
|
+
const result = await executeCronAction({ type: 'cronShow', cronId: 'cron-test0001' }, makeActionCtx(), cronCtx);
|
|
699
|
+
expect(result.ok).toBe(true);
|
|
700
|
+
if (result.ok) {
|
|
701
|
+
expect(result.summary).toContain('State:');
|
|
702
|
+
expect(result.summary).toContain('"cursor"');
|
|
703
|
+
expect(result.summary).toContain('"xyz"');
|
|
704
|
+
}
|
|
705
|
+
});
|
|
706
|
+
it('cronShow omits state when empty', async () => {
|
|
707
|
+
const cronCtx = makeCronCtx();
|
|
708
|
+
const result = await executeCronAction({ type: 'cronShow', cronId: 'cron-test0001' }, makeActionCtx(), cronCtx);
|
|
709
|
+
expect(result.ok).toBe(true);
|
|
710
|
+
if (result.ok) {
|
|
711
|
+
expect(result.summary).not.toContain('State:');
|
|
712
|
+
}
|
|
713
|
+
});
|
|
666
714
|
});
|
|
@@ -97,7 +97,8 @@ export async function handleUpdateCommand(cmd, opts = {}) {
|
|
|
97
97
|
}
|
|
98
98
|
if (npmMode) {
|
|
99
99
|
progress('Installing latest version from npm...');
|
|
100
|
-
const
|
|
100
|
+
const npmEnv = { ...process.env, CFLAGS: '-Wno-incompatible-pointer-types' };
|
|
101
|
+
const install = await run('npm', ['install', '-g', 'discoclaw@latest', '--loglevel=error'], { timeout: 120_000, env: npmEnv });
|
|
101
102
|
if (install.exitCode !== 0) {
|
|
102
103
|
const detail = (install.stderr || install.stdout).trim().slice(0, 500);
|
|
103
104
|
return { reply: `\`npm install -g discoclaw@latest\` failed:\n\`\`\`\n${detail}\n\`\`\`` };
|
package/dist/index.js
CHANGED
|
@@ -1428,6 +1428,7 @@ if (cronEnabled && effectiveCronForum) {
|
|
|
1428
1428
|
statsStore: cronStats,
|
|
1429
1429
|
lockDir: cronLocksDir,
|
|
1430
1430
|
runControl: cronRunControl,
|
|
1431
|
+
getSchedulerJob: (threadId) => cronScheduler.getJob(threadId),
|
|
1431
1432
|
};
|
|
1432
1433
|
savedCronExecCtx = cronExecCtx;
|
|
1433
1434
|
cronCtx.executorCtx = cronExecCtx;
|
package/dist/npm-managed.js
CHANGED
|
@@ -50,6 +50,7 @@ export async function npmGlobalUpgrade() {
|
|
|
50
50
|
try {
|
|
51
51
|
const result = await execa('npm', ['install', '-g', 'discoclaw', '--loglevel=error'], {
|
|
52
52
|
timeout: 120_000,
|
|
53
|
+
env: { ...process.env, CFLAGS: '-Wno-incompatible-pointer-types' },
|
|
53
54
|
});
|
|
54
55
|
return {
|
|
55
56
|
exitCode: result.exitCode ?? 0,
|
package/dist/npm-managed.test.js
CHANGED
|
@@ -83,6 +83,7 @@ describe('npmGlobalUpgrade', () => {
|
|
|
83
83
|
expect(result.stderr).toBe('');
|
|
84
84
|
expect(mockExeca).toHaveBeenCalledWith('npm', ['install', '-g', 'discoclaw', '--loglevel=error'], {
|
|
85
85
|
timeout: 120_000,
|
|
86
|
+
env: expect.objectContaining({ CFLAGS: '-Wno-incompatible-pointer-types' }),
|
|
86
87
|
});
|
|
87
88
|
});
|
|
88
89
|
it('returns a non-zero exitCode and stderr when npm install fails', async () => {
|