agent-pool-mcp 1.0.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +20 -3
- package/index.js +1 -1
- package/package.json +4 -2
- package/src/scheduler/cron.js +84 -0
- package/src/scheduler/daemon.js +430 -0
- package/src/scheduler/pipeline.js +354 -0
- package/src/scheduler/scheduler.js +207 -0
- package/src/server.js +269 -1
- package/src/tool-definitions.js +168 -0
|
@@ -0,0 +1,354 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Pipeline management — CRUD for pipeline definitions and run state.
|
|
3
|
+
*
|
|
4
|
+
* Pipelines are stored as JSON templates in .agent/pipelines/.
|
|
5
|
+
* Each execution creates a run state in .agent/runs/.
|
|
6
|
+
*
|
|
7
|
+
* @module agent-pool/scheduler/pipeline
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
import { readFileSync, writeFileSync, existsSync, mkdirSync, readdirSync, unlinkSync } from 'node:fs';
|
|
11
|
+
import { join, dirname } from 'node:path';
|
|
12
|
+
import { randomUUID } from 'node:crypto';
|
|
13
|
+
import { ensureDaemon } from './scheduler.js';
|
|
14
|
+
|
|
15
|
+
const PIPELINES_DIR = '.agent/pipelines';
|
|
16
|
+
const RUNS_DIR = '.agent/runs';
|
|
17
|
+
|
|
18
|
+
// ─── Helpers ────────────────────────────────────────────────
|
|
19
|
+
|
|
20
|
+
/**
 * Normalize a step trigger into its canonical representation.
 * Accepts objects, the literal 'start', bare step-name strings, or
 * nothing at all, and resolves each to either 'start' or an
 * { type: 'on_complete', step } object.
 * @param {*} trigger - Raw trigger value from user input
 * @param {number} i - Step index
 * @param {Array} steps - All steps array
 * @returns {string|object} Normalized trigger
 */
function normalizeTrigger(trigger, i, steps) {
  // Default when no trigger was supplied (or the value is unrecognized):
  // the first step starts the pipeline, later steps chain off the previous one.
  const chainDefault = () =>
    i === 0 ? 'start' : { type: 'on_complete', step: steps[i - 1].name };

  if (!trigger) return chainDefault();

  // A well-formed trigger object passes through untouched.
  if (typeof trigger === 'object' && trigger.type) return trigger;

  if (typeof trigger === 'string') {
    // The literal 'start' marks a pipeline entry point; any other string
    // is shorthand for "run after that named step completes".
    return trigger === 'start'
      ? 'start'
      : { type: 'on_complete', step: trigger };
  }

  // Anything else (numbers, malformed objects, …) falls back to chaining.
  return chainDefault();
}
|
|
48
|
+
|
|
49
|
+
// ─── Pipeline CRUD ──────────────────────────────────────────
|
|
50
|
+
|
|
51
|
+
/**
 * Create a pipeline definition and persist it under .agent/pipelines/.
 * The pipeline ID is derived from the name (lowercased, characters
 * outside [a-z0-9-] replaced with '-').
 * @param {string} cwd
 * @param {object} opts
 * @param {string} opts.name
 * @param {Array<object>} opts.steps
 * @param {string} [opts.onError] - 'stop' (default) | 'skip'
 * @returns {{ pipelineId: string, path: string }}
 */
export function createPipeline(cwd, { name, steps, onError }) {
  const pipelinesDir = join(cwd, PIPELINES_DIR);
  mkdirSync(pipelinesDir, { recursive: true });

  const pipelineId = name.toLowerCase().replace(/[^a-z0-9-]/g, '-');

  // Normalize each step, accepting both camelCase and snake_case keys
  // from user input.
  const normalizedSteps = steps.map((step, index) => ({
    name: step.name,
    prompt: step.prompt,
    skill: step.skill || null,
    approvalMode: step.approval_mode || 'yolo',
    timeout: step.timeout || 600,
    maxBounces: step.maxBounces ?? step.max_bounces ?? 2,
    trigger: normalizeTrigger(step.trigger, index, steps),
    expectedOutput: step.expectedOutput || step.expected_output || null,
  }));

  const definition = {
    id: pipelineId,
    name,
    steps: normalizedSteps,
    onError: onError || 'stop',
    createdAt: new Date().toISOString(),
  };

  const path = join(pipelinesDir, `${pipelineId}.json`);
  writeFileSync(path, JSON.stringify(definition, null, 2));
  return { pipelineId, path };
}
|
|
86
|
+
|
|
87
|
+
/**
 * List all pipeline definitions stored in .agent/pipelines/.
 * Unreadable or corrupt JSON files are silently skipped.
 * @param {string} cwd
 * @returns {Array<object>}
 */
export function listPipelines(cwd) {
  const dir = join(cwd, PIPELINES_DIR);
  if (!existsSync(dir)) return [];

  const pipelines = [];
  for (const file of readdirSync(dir)) {
    if (!file.endsWith('.json')) continue;
    try {
      pipelines.push(JSON.parse(readFileSync(join(dir, file), 'utf-8')));
    } catch {
      // Corrupt or unreadable definition — skip it.
    }
  }
  return pipelines;
}
|
|
103
|
+
|
|
104
|
+
/**
 * Get a pipeline definition by ID.
 * @param {string} cwd
 * @param {string} pipelineId
 * @returns {object|null} Parsed definition, or null when missing/corrupt.
 */
export function getPipeline(cwd, pipelineId) {
  const definitionPath = join(cwd, PIPELINES_DIR, `${pipelineId}.json`);
  if (!existsSync(definitionPath)) return null;

  let raw;
  try {
    raw = readFileSync(definitionPath, 'utf-8');
  } catch {
    return null;
  }
  try {
    return JSON.parse(raw);
  } catch {
    return null; // unparseable JSON is treated the same as a missing file
  }
}
|
|
116
|
+
|
|
117
|
+
// ─── Run Management ─────────────────────────────────────────
|
|
118
|
+
|
|
119
|
+
/**
 * Start a pipeline run. Creates a run-state file under .agent/runs/ with
 * every step marked pending, then makes sure the daemon is alive so it
 * can pick the run up on its next tick.
 * @param {string} cwd
 * @param {string} pipelineId
 * @returns {{ runId: string } | null} null when the pipeline does not exist.
 */
export function runPipeline(cwd, pipelineId) {
  const pipeline = getPipeline(cwd, pipelineId);
  if (!pipeline) return null;

  const runsDir = join(cwd, RUNS_DIR);
  mkdirSync(runsDir, { recursive: true });

  // Short run ID: first segment of a UUID.
  const runId = randomUUID().split('-')[0];

  // Every step starts out pending with empty bookkeeping fields.
  const steps = Object.fromEntries(
    pipeline.steps.map((step) => [
      step.name,
      {
        status: 'pending',
        pid: null,
        exitCode: null,
        signaled: false,
        bounces: 0,
        lastBounceReason: null,
        startedAt: null,
        completedAt: null,
      },
    ])
  );

  const run = {
    id: runId,
    pipeline: pipelineId,
    pipelineName: pipeline.name,
    status: 'running',
    cwd,
    startedAt: new Date().toISOString(),
    completedAt: null,
    steps,
  };

  writeFileSync(join(runsDir, `${runId}.json`), JSON.stringify(run, null, 2));

  // The daemon processes pipeline ticks; make sure one is alive.
  ensureDaemon(cwd);

  return { runId };
}
|
|
165
|
+
|
|
166
|
+
/**
 * Read a run state from .agent/runs/.
 * @param {string} cwd
 * @param {string} runId
 * @returns {object|null} Parsed run, or null when missing/corrupt.
 */
export function getRun(cwd, runId) {
  const runPath = join(cwd, RUNS_DIR, `${runId}.json`);
  if (!existsSync(runPath)) return null;

  let raw;
  try {
    raw = readFileSync(runPath, 'utf-8');
  } catch {
    return null;
  }
  try {
    return JSON.parse(raw);
  } catch {
    return null; // corrupt run file behaves like a missing one
  }
}
|
|
178
|
+
|
|
179
|
+
/**
 * Persist a run state to .agent/runs/<runId>.json, creating the
 * directory on first use.
 * @param {string} cwd
 * @param {string} runId
 * @param {object} run
 */
export function saveRun(cwd, runId, run) {
  const runsDir = join(cwd, RUNS_DIR);
  mkdirSync(runsDir, { recursive: true });
  const serialized = JSON.stringify(run, null, 2);
  writeFileSync(join(runsDir, `${runId}.json`), serialized);
}
|
|
190
|
+
|
|
191
|
+
/**
 * List all runs, newest first (optionally filtered by pipeline ID).
 * Corrupt run files are skipped.
 * @param {string} cwd
 * @param {string} [pipelineId]
 * @returns {Array<object>}
 */
export function listRuns(cwd, pipelineId) {
  const dir = join(cwd, RUNS_DIR);
  if (!existsSync(dir)) return [];

  const runs = [];
  for (const file of readdirSync(dir)) {
    if (!file.endsWith('.json')) continue;
    let run;
    try {
      run = JSON.parse(readFileSync(join(dir, file), 'utf-8'));
    } catch {
      continue; // unreadable / corrupt run file
    }
    if (!run) continue;
    if (pipelineId && run.pipeline !== pipelineId) continue;
    runs.push(run);
  }

  // Newest first; runs missing a startedAt timestamp sort last.
  runs.sort((a, b) => (b.startedAt || '').localeCompare(a.startedAt || ''));
  return runs;
}
|
|
209
|
+
|
|
210
|
+
/**
 * Cancel a pipeline run: terminate any running step process, mark
 * pending steps skipped, and persist the cancelled run state.
 * @param {string} cwd
 * @param {string} runId
 * @returns {boolean} true when a running run was cancelled.
 */
export function cancelRun(cwd, runId) {
  const run = getRun(cwd, runId);
  if (!run || run.status !== 'running') return false;

  for (const step of Object.values(run.steps)) {
    if (step.status === 'running' && step.pid) {
      // Best-effort kill — the process may have exited on its own.
      try {
        process.kill(step.pid, 'SIGTERM');
      } catch {
        /* already dead */
      }
      step.status = 'cancelled';
    }
    if (step.status === 'pending') {
      step.status = 'skipped';
    }
  }

  run.status = 'cancelled';
  run.completedAt = new Date().toISOString();
  saveRun(cwd, runId, run);
  return true;
}
|
|
235
|
+
|
|
236
|
+
// ─── Signal Handling ────────────────────────────────────────
|
|
237
|
+
|
|
238
|
+
/**
 * Find the first active (status 'running') run that contains a step
 * with the given name. Corrupt run files are skipped.
 * @param {string} cwd
 * @param {string} stepName
 * @returns {{ run: object, runId: string } | null}
 */
export function findActiveRunByStep(cwd, stepName) {
  const dir = join(cwd, RUNS_DIR);
  if (!existsSync(dir)) return null;

  const runFiles = readdirSync(dir).filter((f) => f.endsWith('.json'));
  for (const file of runFiles) {
    let run;
    try {
      run = JSON.parse(readFileSync(join(dir, file), 'utf-8'));
    } catch {
      continue; // unreadable / corrupt run file
    }
    // Optional chaining keeps malformed run objects from throwing,
    // matching the original's catch-and-skip behavior.
    if (run?.status === 'running' && run?.steps?.[stepName]) {
      return { run, runId: file.replace('.json', '') };
    }
  }
  return null;
}
|
|
258
|
+
|
|
259
|
+
/**
 * Signal step completion. Called by an agent via MCP tool.
 * Resolves the run either directly by runId or by scanning active runs
 * for the step name, then marks the step successful and persists.
 * @param {string} cwd
 * @param {string} stepName
 * @param {string} [output] - Optional output to record on the step
 * @param {string} [runId] - Specific run ID (recommended)
 * @returns {{ success: boolean, nextStep?: string }}
 */
export function signalStepComplete(cwd, stepName, output, runId) {
  let run;
  let resolvedRunId;

  if (runId) {
    // Direct lookup by run ID.
    run = getRun(cwd, runId);
    resolvedRunId = runId;
  } else {
    // Fallback: locate an active run containing this step name.
    const match = findActiveRunByStep(cwd, stepName);
    if (!match) return { success: false };
    run = match.run;
    resolvedRunId = match.runId;
  }

  if (!run || run.status !== 'running') return { success: false };

  const step = run.steps[stepName];
  if (!step || step.status !== 'running') return { success: false };

  step.status = 'success';
  step.signaled = true;
  step.completedAt = new Date().toISOString();
  if (output) step.output = output;

  saveRun(cwd, resolvedRunId, run);
  return { success: true };
}
|
|
294
|
+
|
|
295
|
+
/**
 * Bounce back to a previous step. Called by an agent via MCP tool.
 *
 * Finds the run containing the target step, increments its bounce count,
 * and resets it so the daemon re-runs it. If the step has already been
 * bounced maxBounces times, the whole run is failed instead.
 *
 * @param {string} cwd
 * @param {string} targetStepName - Step to re-run
 * @param {string} reason - Why bouncing back
 * @param {string} [runId] - Specific run ID (recommended)
 * @returns {{ success: boolean, bounceCount?: number, maxBounces?: number }}
 */
export function bounceBack(cwd, targetStepName, reason, runId) {
  const dir = join(cwd, RUNS_DIR);
  if (!existsSync(dir)) return { success: false };

  // FIX: the runId parameter was previously accepted and documented but
  // never used — the function always scanned every run file, which could
  // match the wrong run when two active runs share a step name. When a
  // runId is given, restrict the search to exactly that run file.
  const files = runId
    ? [`${runId}.json`]
    : readdirSync(dir).filter((f) => f.endsWith('.json'));

  for (const f of files) {
    try {
      const run = JSON.parse(readFileSync(join(dir, f), 'utf-8'));
      if (run.status !== 'running') continue;

      const targetStep = run.steps[targetStepName];
      if (!targetStep) continue;

      // Look up the bounce limit from the pipeline definition (default 2).
      const pipeline = getPipeline(run.cwd || cwd, run.pipeline);
      const stepDef = pipeline?.steps.find((s) => s.name === targetStepName);
      const maxBounces = stepDef?.maxBounces ?? 2;

      if (targetStep.bounces >= maxBounces) {
        // Bounce limit reached — fail the whole pipeline run.
        targetStep.status = 'failed';
        targetStep.lastBounceReason = `Bounce limit (${maxBounces}) reached. Last: ${reason}`;
        run.status = 'failed';
        run.completedAt = new Date().toISOString();
        saveRun(cwd, f.replace('.json', ''), run);
        return { success: false, bounceCount: targetStep.bounces, maxBounces };
      }

      // Reset the target step so the daemon re-runs it.
      targetStep.status = 'bounce_pending';
      targetStep.bounces += 1;
      targetStep.lastBounceReason = reason;
      targetStep.pid = null;
      targetStep.exitCode = null;
      targetStep.signaled = false;

      // Park the currently-running (calling) step until the bounce resolves.
      const callingStepName = Object.keys(run.steps).find(
        (name) => run.steps[name].status === 'running'
      );
      if (callingStepName) {
        run.steps[callingStepName].status = 'waiting_bounce';
      }

      saveRun(cwd, f.replace('.json', ''), run);
      return { success: true, bounceCount: targetStep.bounces, maxBounces };
    } catch {
      /* unreadable run file — skip (a missing runId file also lands here) */
    }
  }

  return { success: false };
}
|
|
@@ -0,0 +1,207 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Schedule management — CRUD operations for schedule.json
|
|
3
|
+
* and daemon lifecycle control.
|
|
4
|
+
*
|
|
5
|
+
* @module agent-pool/scheduler/scheduler
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { readFileSync, writeFileSync, existsSync, mkdirSync, readdirSync, unlinkSync } from 'node:fs';
|
|
9
|
+
import { spawn } from 'node:child_process';
|
|
10
|
+
import { join, dirname } from 'node:path';
|
|
11
|
+
import { randomUUID } from 'node:crypto';
|
|
12
|
+
import { fileURLToPath } from 'node:url';
|
|
13
|
+
import { nextCronRun } from './cron.js';
|
|
14
|
+
|
|
15
|
+
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
16
|
+
const DAEMON_SCRIPT = join(__dirname, 'daemon.js');
|
|
17
|
+
|
|
18
|
+
const SCHEDULE_FILE = '.agent/schedule.json';
|
|
19
|
+
const RESULTS_DIR = '.agent/scheduled-results';
|
|
20
|
+
const PID_FILE = '.agent/scheduler.pid';
|
|
21
|
+
|
|
22
|
+
// ─── Schedule CRUD ──────────────────────────────────────────
|
|
23
|
+
|
|
24
|
+
/**
 * Read all schedules from .agent/schedule.json.
 * A missing or corrupt file yields an empty list.
 * @param {string} cwd
 * @returns {Array<object>}
 */
export function readSchedules(cwd) {
  const schedulePath = join(cwd, SCHEDULE_FILE);
  if (!existsSync(schedulePath)) return [];

  let schedules;
  try {
    schedules = JSON.parse(readFileSync(schedulePath, 'utf-8'));
  } catch {
    schedules = []; // corrupt schedule file → behave as empty
  }
  return schedules;
}
|
|
38
|
+
|
|
39
|
+
/**
 * Persist the schedule list to .agent/schedule.json, creating the
 * .agent directory on first write.
 * @param {string} cwd
 * @param {Array<object>} schedules
 */
function writeSchedules(cwd, schedules) {
  const schedulePath = join(cwd, SCHEDULE_FILE);
  mkdirSync(dirname(schedulePath), { recursive: true });
  const serialized = JSON.stringify(schedules, null, 2);
  writeFileSync(schedulePath, serialized);
}
|
|
49
|
+
|
|
50
|
+
/**
 * Add a new schedule and ensure the daemon is running.
 *
 * @param {string} cwd - Working directory
 * @param {object} opts
 * @param {string} opts.prompt - Task prompt
 * @param {string} opts.cron - Cron expression (5-field)
 * @param {string} [opts.skill] - Skill to activate
 * @param {string} [opts.approvalMode] - yolo | auto_edit | plan
 * @param {boolean} [opts.catchup] - Run missed schedules on restart
 * @param {string} [opts.taskCwd] - Working directory for the task
 * @returns {{ scheduleId: string, nextRun: string | null }}
 */
export function addSchedule(cwd, { prompt, cron, skill, approvalMode, catchup, taskCwd }) {
  const scheduleId = randomUUID().split('-')[0]; // short ID

  const entry = {
    id: scheduleId,
    prompt,
    cron,
    cwd: taskCwd || cwd,
    skill: skill || null,
    approvalMode: approvalMode || 'yolo',
    catchup: catchup ?? false,
    lastRun: null,
    createdAt: new Date().toISOString(),
  };

  const schedules = readSchedules(cwd);
  schedules.push(entry);
  writeSchedules(cwd, schedules);

  // Schedules only fire while the daemon is alive.
  ensureDaemon(cwd);

  const next = nextCronRun(cron, new Date());
  return {
    scheduleId,
    nextRun: next ? next.toISOString() : null,
  };
}
|
|
91
|
+
|
|
92
|
+
/**
 * Remove a schedule by ID (first match only).
 * @param {string} cwd
 * @param {string} scheduleId
 * @returns {boolean} true if found and removed
 */
export function removeSchedule(cwd, scheduleId) {
  const schedules = readSchedules(cwd);
  const position = schedules.findIndex((s) => s.id === scheduleId);
  if (position < 0) return false;

  schedules.splice(position, 1);
  writeSchedules(cwd, schedules);
  return true;
}
|
|
106
|
+
|
|
107
|
+
/**
 * List schedules, each annotated with its next run time (ISO string,
 * or null when the cron expression yields no upcoming run).
 * @param {string} cwd
 * @returns {Array<object>}
 */
export function listSchedules(cwd) {
  return readSchedules(cwd).map((schedule) => {
    const next = nextCronRun(schedule.cron, new Date());
    return {
      ...schedule,
      nextRun: next ? next.toISOString() : null,
    };
  });
}
|
|
122
|
+
|
|
123
|
+
// ─── Results ────────────────────────────────────────────────
|
|
124
|
+
|
|
125
|
+
/**
 * Get results for a schedule (or all), newest first, capped at 20.
 *
 * FIX: the previous implementation took `files.slice(-20)` BEFORE
 * filtering by scheduleId — so a filtered query could return far fewer
 * than 20 matches even when more existed on disk — and relied on
 * readdir's platform-dependent ordering to define "last". It also
 * crashed in the sort when a result lacked `executedAt`. Now all files
 * are read, filtered, sorted with a null-safe comparator, and capped
 * after filtering.
 *
 * @param {string} cwd
 * @param {string} [scheduleId] - Filter by schedule ID
 * @returns {Array<object>}
 */
export function getScheduledResults(cwd, scheduleId) {
  const dir = join(cwd, RESULTS_DIR);
  if (!existsSync(dir)) return [];

  const results = [];
  for (const file of readdirSync(dir)) {
    if (!file.endsWith('.json')) continue;
    try {
      const data = JSON.parse(readFileSync(join(dir, file), 'utf-8'));
      if (!scheduleId || data.scheduleId === scheduleId) {
        results.push(data);
      }
    } catch { /* skip corrupt files */ }
  }

  // Newest first; tolerate results missing executedAt instead of throwing.
  results.sort((a, b) => (b.executedAt || '').localeCompare(a.executedAt || ''));

  // Cap AFTER filtering so a scheduleId query still yields up to 20 matches.
  return results.slice(0, 20);
}
|
|
149
|
+
|
|
150
|
+
// ─── Daemon lifecycle ───────────────────────────────────────
|
|
151
|
+
|
|
152
|
+
/**
 * Check if the scheduler daemon is running by probing the PID recorded
 * in .agent/scheduler.pid. A stale or corrupt PID file is deleted.
 *
 * FIX: parseInt is now called with an explicit radix 10, and a
 * non-numeric or non-positive PID is rejected explicitly instead of
 * relying on process.kill to reject NaN.
 *
 * @param {string} cwd
 * @returns {{ running: boolean, pid: number | null }}
 */
export function getDaemonStatus(cwd) {
  const pidPath = join(cwd, PID_FILE);
  if (!existsSync(pidPath)) return { running: false, pid: null };

  try {
    const pid = Number.parseInt(readFileSync(pidPath, 'utf-8').trim(), 10);
    if (!Number.isInteger(pid) || pid <= 0) {
      throw new Error('invalid pid file');
    }
    process.kill(pid, 0); // signal 0 = existence check, sends nothing
    return { running: true, pid };
  } catch {
    // Process is gone or the file is corrupt — clean up the stale PID file.
    try { unlinkSync(pidPath); } catch { /* ignore */ }
    return { running: false, pid: null };
  }
}
|
|
171
|
+
|
|
172
|
+
/**
 * Start the scheduler daemon if it is not already running.
 * The child is spawned detached with ignored stdio and unref'd so it
 * outlives (and does not block) the current process.
 * @param {string} cwd
 * @returns {{ started: boolean, pid: number | null }}
 */
export function ensureDaemon(cwd) {
  const { running, pid } = getDaemonStatus(cwd);
  if (running) return { started: false, pid };

  const daemon = spawn('node', [DAEMON_SCRIPT, cwd], {
    cwd,
    detached: true,
    stdio: 'ignore',
    env: { ...process.env },
  });
  // Don't keep the parent's event loop alive on the daemon's account.
  daemon.unref();

  return { started: true, pid: daemon.pid };
}
|
|
191
|
+
|
|
192
|
+
/**
 * Stop the scheduler daemon via SIGTERM.
 * @param {string} cwd
 * @returns {boolean} true if a running daemon was signalled
 */
export function stopDaemon(cwd) {
  const { running, pid } = getDaemonStatus(cwd);
  if (!running) return false;

  try {
    process.kill(pid, 'SIGTERM');
  } catch {
    // Daemon exited between the status check and the kill.
    return false;
  }
  return true;
}
|