agent-pool-mcp 1.0.1 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +1 -1
- package/package.json +3 -1
- package/src/scheduler/cron.js +84 -0
- package/src/scheduler/daemon.js +430 -0
- package/src/scheduler/pipeline.js +354 -0
- package/src/scheduler/scheduler.js +207 -0
- package/src/server.js +269 -1
- package/src/tool-definitions.js +168 -0
package/index.js
CHANGED
|
@@ -33,6 +33,6 @@ async function startServer() {
|
|
|
33
33
|
const server = createServer();
|
|
34
34
|
const transport = new StdioServerTransport();
|
|
35
35
|
await server.connect(transport);
|
|
36
|
-
console.error('[agent-pool] MCP server v1.0
|
|
36
|
+
console.error('[agent-pool] MCP server v1.2.0 started');
|
|
37
37
|
}
|
|
38
38
|
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "agent-pool-mcp",
|
|
3
|
-
"version": "1.0
|
|
3
|
+
"version": "1.2.0",
|
|
4
4
|
"type": "module",
|
|
5
5
|
"description": "MCP Server for multi-agent task delegation and orchestration via Gemini CLI",
|
|
6
6
|
"main": "index.js",
|
|
@@ -27,6 +27,8 @@
|
|
|
27
27
|
"multi-agent",
|
|
28
28
|
"delegation",
|
|
29
29
|
"orchestration",
|
|
30
|
+
"scheduler",
|
|
31
|
+
"cron",
|
|
30
32
|
"ai"
|
|
31
33
|
],
|
|
32
34
|
"author": "Vladislav Matiyasevich",
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Minimal cron expression parser.
|
|
3
|
+
* Supports: star, star/N (step), N, N-M (range), N,M (list), MON-FRI for weekdays (0=Sun, 6=Sat).
|
|
4
|
+
* 5 fields: minute hour day month weekday
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
// Name aliases for the month field (1-based, matching Date#getMonth() + 1 as used in matchesCron).
const MONTHS = { JAN: 1, FEB: 2, MAR: 3, APR: 4, MAY: 5, JUN: 6, JUL: 7, AUG: 8, SEP: 9, OCT: 10, NOV: 11, DEC: 12 };
// Name aliases for the weekday field (0 = Sunday, matching Date#getDay()).
const DAYS = { SUN: 0, MON: 1, TUE: 2, WED: 3, THU: 4, FRI: 5, SAT: 6 };
|
|
9
|
+
|
|
10
|
+
/**
 * Tests a single cron field expression against a concrete value.
 * Supports '*', steps (star-slash-N, 'A-B/N'), single values, ranges ('A-B'),
 * comma lists, and name aliases (e.g. MON, JAN) supplied via `aliases`.
 *
 * @param {string} value - One cron field (e.g. '*', 'MON-FRI', '1,5', '0-59/10').
 * @param {number} target - The actual date component to test.
 * @param {number} min - Lowest valid value for this field; used as the step base for '*'.
 * @param {number} max - Highest valid value. NOTE(review): currently informational only —
 *   out-of-range bounds like '10-99' are not rejected; confirm whether that is intended.
 * @param {Object<string, number>} aliases - Name-to-number aliases (MONTHS/DAYS) or {}.
 * @returns {boolean} True if `target` satisfies the field expression.
 */
function parseField(value, target, min, max, aliases) {
  if (value === '*') return true;
  for (const part of value.split(',')) {
    let step = 1;
    let range = part;
    if (part.includes('/')) {
      const [base, rawStep] = part.split('/');
      range = base;
      step = Number.parseInt(rawStep, 10);
      // Reject missing, non-numeric, zero or negative steps outright.
      // (The original NaN-only check let a negative step like '*/-1'
      // match every value, since (target - min) % -1 === 0.)
      if (Number.isNaN(step) || step <= 0) return false;
    }
    if (range === '*') {
      // Wildcard with step: match every step-th value counted from `min`.
      if ((target - min) % step === 0) return true;
      continue;
    }
    let start;
    let end;
    if (range.includes('-')) {
      const bounds = range.split('-');
      start = aliases[bounds[0].toUpperCase()] ?? Number.parseInt(bounds[0], 10);
      end = aliases[bounds[1].toUpperCase()] ?? Number.parseInt(bounds[1], 10);
    } else {
      start = aliases[range.toUpperCase()] ?? Number.parseInt(range, 10);
      end = start;
    }
    // Unparseable list entry: skip it but keep checking the remaining entries.
    if (Number.isNaN(start) || Number.isNaN(end)) continue;
    if (target >= start && target <= end && (target - start) % step === 0) {
      return true;
    }
  }
  return false;
}
|
|
42
|
+
|
|
43
|
+
/**
 * Evaluates whether a Date falls on a minute described by a 5-field cron
 * expression (minute hour day-of-month month weekday).
 *
 * NOTE: all five fields must match — day-of-month and day-of-week are ANDed,
 * unlike classic crontab(5), which ORs them when both are restricted.
 *
 * @param {string} cronExpr - 5-field cron expression
 * @param {Date} date - Date to check
 * @returns {boolean} True if the date matches; false for any invalid input.
 */
export function matchesCron(cronExpr, date) {
  if (typeof cronExpr !== 'string') return false;
  if (!(date instanceof Date) || Number.isNaN(date.getTime())) return false;

  const fields = cronExpr.trim().split(/\s+/);
  if (fields.length !== 5) return false;

  try {
    // Field order: minute, hour, day-of-month, month (1-based), weekday (0 = Sun).
    const checks = [
      [fields[0], date.getMinutes(), 0, 59, {}],
      [fields[1], date.getHours(), 0, 23, {}],
      [fields[2], date.getDate(), 1, 31, {}],
      [fields[3], date.getMonth() + 1, 1, 12, MONTHS],
      [fields[4], date.getDay(), 0, 6, DAYS],
    ];
    return checks.every(([expr, actual, lo, hi, aliases]) =>
      parseField(expr, actual, lo, hi, aliases));
  } catch {
    return false;
  }
}
|
|
63
|
+
|
|
64
|
+
/**
 * Finds the next Date (minute resolution) that matches the cron expression,
 * scanning forward one minute at a time starting from the minute after
 * `fromDate`. Gives up after roughly five years of calendar time.
 *
 * @param {string} cronExpr - 5-field cron expression
 * @param {Date} fromDate - Starting date (exclusive; seconds are truncated)
 * @returns {Date|null} Next matching date, or null for invalid input / no match
 */
export function nextCronRun(cronExpr, fromDate) {
  const validDate = fromDate instanceof Date && !Number.isNaN(fromDate.getTime());
  if (typeof cronExpr !== 'string' || !validDate) return null;
  if (cronExpr.trim().split(/\s+/).length !== 5) return null;

  // Start the scan at the top of the NEXT minute.
  const candidate = new Date(fromDate.getTime());
  candidate.setSeconds(0, 0);
  candidate.setMinutes(candidate.getMinutes() + 1);

  const horizonYear = candidate.getFullYear() + 5;
  for (;;) {
    if (candidate.getFullYear() >= horizonYear) return null;
    if (matchesCron(cronExpr, candidate)) {
      return new Date(candidate.getTime());
    }
    candidate.setMinutes(candidate.getMinutes() + 1);
  }
}
|
|
@@ -0,0 +1,430 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Scheduler Daemon — standalone detached process.
|
|
5
|
+
* Reads schedule.json, spawns Gemini CLI agents on cron schedule.
|
|
6
|
+
* Survives parent process death (MCP server, IDE, CLI).
|
|
7
|
+
*
|
|
8
|
+
* Usage: spawned by MCP server with detached:true + unref()
|
|
9
|
+
* NOT meant to be run manually.
|
|
10
|
+
*
|
|
11
|
+
* @module agent-pool/scheduler/daemon
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
import { readFileSync, writeFileSync, existsSync, mkdirSync, unlinkSync } from 'node:fs';
|
|
15
|
+
import { spawn } from 'node:child_process';
|
|
16
|
+
import { join, dirname } from 'node:path';
|
|
17
|
+
import { matchesCron } from './cron.js';
|
|
18
|
+
|
|
19
|
+
// NOTE(review): POLL_INTERVAL_MS is not referenced by tick(), which uses the
// literals 3000/30000 for adaptive polling — confirm whether this is dead code.
const POLL_INTERVAL_MS = 30_000; // Check schedules every 30 seconds
// All paths below are relative to `cwd` (the monitored project root).
const PID_FILE = '.agent/scheduler.pid';        // single-instance lock file
const SCHEDULE_FILE = '.agent/schedule.json';   // cron schedule entries
const RESULTS_DIR = '.agent/scheduled-results'; // per-execution result JSON files

/** @type {string} Project root to operate in — first CLI arg, else the daemon's own cwd. */
const cwd = process.argv[2] || process.cwd();
|
|
26
|
+
|
|
27
|
+
// ─── PID file management ────────────────────────────────────
|
|
28
|
+
|
|
29
|
+
/**
 * Single-instance guard: write our PID to the lock file, or exit if a live
 * daemon already owns it.
 *
 * If the PID file exists, holds a parseable integer, and that process
 * responds to signal 0, another daemon is running and this process exits(0).
 * A stale file (dead process or unparseable contents) is silently taken over.
 *
 * NOTE(review): a process we lack permission to signal (EPERM) also lands in
 * the catch and is treated as stale — confirm that is acceptable here.
 */
function acquireLock() {
  const pidPath = join(cwd, PID_FILE);
  if (existsSync(pidPath)) {
    try {
      // Radix 10: the file should contain a plain decimal PID.
      const existingPid = Number.parseInt(readFileSync(pidPath, 'utf-8').trim(), 10);
      // Only probe when we actually parsed a PID; previously garbage content
      // produced process.kill(NaN, 0), relying on the thrown TypeError.
      if (Number.isInteger(existingPid)) {
        // Signal 0 checks process existence without delivering a signal.
        process.kill(existingPid, 0);
        // kill() did not throw — the other daemon is alive; defer to it.
        console.error(`[scheduler] Another daemon running (pid ${existingPid}). Exiting.`);
        process.exit(0);
      }
    } catch {
      // Process dead (ESRCH) or file unreadable — stale PID file, take over.
    }
  }
  mkdirSync(dirname(pidPath), { recursive: true });
  writeFileSync(pidPath, String(process.pid));
}
|
|
49
|
+
|
|
50
|
+
/**
 * Best-effort removal of the PID file, but only when this process owns it
 * (the stored PID equals ours). Every error is swallowed — cleanup must
 * never prevent shutdown.
 */
function releaseLock() {
  try {
    const pidPath = join(cwd, PID_FILE);
    if (!existsSync(pidPath)) return;
    const ownerPid = readFileSync(pidPath, 'utf-8').trim();
    if (ownerPid !== String(process.pid)) return;
    unlinkSync(pidPath);
  } catch {
    /* best-effort — ignore */
  }
}
|
|
64
|
+
|
|
65
|
+
// ─── Schedule management ────────────────────────────────────
|
|
66
|
+
|
|
67
|
+
/**
 * Load the schedule list from .agent/schedule.json.
 *
 * Defensive: returns [] when the file is missing, unreadable, contains
 * invalid JSON, or parses to something other than an array. (Callers iterate
 * the result and call .findIndex on it, so a non-array — e.g. a hand-edited
 * `{}` — would previously crash them.)
 *
 * @returns {Array<{id: string, prompt: string, cron: string, cwd: string, skill?: string, approvalMode?: string, catchup?: boolean, lastRun?: string, createdAt: string}>}
 */
function readSchedules() {
  const filePath = join(cwd, SCHEDULE_FILE);
  if (!existsSync(filePath)) return [];
  try {
    const parsed = JSON.parse(readFileSync(filePath, 'utf-8'));
    return Array.isArray(parsed) ? parsed : [];
  } catch {
    return [];
  }
}
|
|
80
|
+
|
|
81
|
+
/**
 * Shallow-merge `updates` into the schedule with the given id and persist
 * the full list back to schedule.json. No-op when the id is unknown.
 *
 * @param {string} scheduleId - Id of the schedule entry to patch.
 * @param {object} updates - Fields to merge in (e.g. { lastRun }).
 */
function updateSchedule(scheduleId, updates) {
  const schedules = readSchedules();
  const entry = schedules.find((candidate) => candidate.id === scheduleId);
  if (!entry) return;
  Object.assign(entry, updates);
  writeFileSync(join(cwd, SCHEDULE_FILE), JSON.stringify(schedules, null, 2));
}
|
|
93
|
+
|
|
94
|
+
// ─── Gemini CLI execution ───────────────────────────────────
|
|
95
|
+
|
|
96
|
+
/**
 * Run a Gemini CLI task for a schedule entry and persist the outcome.
 *
 * Spawns a detached `gemini` process, buffers its stream-json stdout, and on
 * process close writes a summary to RESULTS_DIR/<id>_<timestamp>.json. The
 * schedule's lastRun is stamped at spawn time (not completion), which is what
 * the per-minute dedup in tick() relies on.
 *
 * @param {object} schedule - Entry from schedule.json (id, prompt, cron,
 *   cwd?, skill?, approvalMode?).
 */
function executeSchedule(schedule) {
  const timestamp = Date.now();
  const resultFile = join(cwd, RESULTS_DIR, `${schedule.id}_${timestamp}.json`);
  mkdirSync(join(cwd, RESULTS_DIR), { recursive: true });

  // 'yolo' approval mode — presumably auto-approves tool calls for
  // unattended runs; verify against Gemini CLI docs.
  const args = [
    '-p', schedule.prompt,
    '--output-format', 'stream-json',
    '--approval-mode', schedule.approvalMode || 'yolo',
  ];
  if (schedule.skill) {
    // Skills are pre-provisioned; activation is requested through the prompt
    // itself (args[1] is the value of the '-p' flag above).
    args[1] = `Activate skill "${schedule.skill}" first.\n\n${schedule.prompt}`;
  }

  // detached + unref() below let the agent outlive this daemon.
  // TERM/CI env — presumably to suppress interactive/TTY behavior; confirm.
  const child = spawn('gemini', args, {
    cwd: schedule.cwd || cwd,
    env: { ...process.env, TERM: 'dumb', CI: '1' },
    stdio: ['pipe', 'pipe', 'pipe'],
    detached: true,
  });

  let stdout = '';
  let stderr = ''; // NOTE(review): collected but never persisted in the result file.
  child.stdout.on('data', (d) => { stdout += d.toString(); });
  child.stderr.on('data', (d) => { stderr += d.toString(); });

  child.on('close', (code) => {
    // Parse newline-delimited stream-json events; non-JSON lines are dropped.
    const events = stdout.split('\n').filter(Boolean).map((line) => {
      try { return JSON.parse(line); } catch { return null; }
    }).filter(Boolean);

    // Prefer the final 'result' event; fall back to joining assistant messages.
    const messages = events.filter((e) => e.type === 'message' && e.role === 'assistant');
    const resultEvent = events.find((e) => e.type === 'result');
    const response = resultEvent?.response || messages.map((m) => m.content || m.text || '').join('\n');

    const result = {
      scheduleId: schedule.id,
      prompt: schedule.prompt,
      cron: schedule.cron,
      executedAt: new Date(timestamp).toISOString(),
      completedAt: new Date().toISOString(),
      exitCode: code,
      response: response.substring(0, 5000), // cap stored output at 5000 chars
      totalEvents: events.length,
    };

    try {
      writeFileSync(resultFile, JSON.stringify(result, null, 2));
    } catch { /* ignore write errors */ }

    console.error(`[scheduler] Completed: ${schedule.id} (exit: ${code})`);
  });

  // No input for the child; close stdin so it cannot block waiting on it.
  child.stdin.end();
  child.unref();

  // Stamp lastRun immediately so the next tick does not double-fire this minute.
  updateSchedule(schedule.id, { lastRun: new Date().toISOString() });
  console.error(`[scheduler] Started: ${schedule.id} → gemini pid ${child.pid}`);
}
|
|
161
|
+
|
|
162
|
+
// ─── Pipeline tick ──────────────────────────────────────────
|
|
163
|
+
|
|
164
|
+
import { readdirSync } from 'node:fs';
|
|
165
|
+
|
|
166
|
+
// Pipeline definition files: .agent/pipelines/<name>.json
const PIPELINES_DIR = '.agent/pipelines';
// Pipeline run state files: .agent/runs/<runId>.json
const RUNS_DIR = '.agent/runs';
|
|
168
|
+
|
|
169
|
+
/**
 * Spawn a Gemini CLI agent for a pipeline step.
 *
 * The step prompt is wrapped with pipeline context plus an instruction to
 * call signal_step_complete on completion; for a bounce-back retry, the
 * rejection reason is appended first. The child's exit code is written into
 * the run state file when the process closes.
 *
 * @param {object} stepDef - Step definition from pipeline
 * @param {object} run - Current run state
 * @param {string} runId
 * @param {string} [bounceReason] - If bouncing back, the reason
 * @returns {number} child PID
 */
function spawnStep(stepDef, run, runId, bounceReason) {
  let prompt = stepDef.prompt;
  if (bounceReason) {
    // Retry after rejection: tell the agent why the previous attempt was
    // bounced so it can amend the result (user-facing text is in Russian).
    prompt = `${stepDef.prompt}\n\n⚠️ BOUNCE BACK: предыдущая попытка была отклонена следующим шагом.\nПричина: ${bounceReason}\nДополни и улучши результат.`;
  }

  // Inject pipeline context and the completion-signal instruction.
  prompt = `[Pipeline: ${run.pipelineName}, Step: ${stepDef.name}, Run: ${runId}]\n\nTask:\n${prompt}\n\nWhen finished, call signal_step_complete with step_name "${stepDef.name}" and run_id "${runId}".`;

  const args = [
    '-p', prompt,
    '--output-format', 'stream-json',
    '--approval-mode', stepDef.approvalMode || 'yolo',
  ];

  // Detached + unref'd below so the step survives a daemon restart.
  const child = spawn('gemini', args, {
    cwd: run.cwd || cwd,
    env: { ...process.env, TERM: 'dumb', CI: '1' },
    stdio: ['pipe', 'pipe', 'pipe'],
    detached: true,
  });

  child.on('close', (code) => {
    // Persist the exit code into the run state so tickPipelines can
    // auto-complete or fail the step if the agent never signaled.
    try {
      const currentRun = JSON.parse(readFileSync(join(cwd, RUNS_DIR, `${runId}.json`), 'utf-8'));
      if (currentRun.steps[stepDef.name]) {
        currentRun.steps[stepDef.name].exitCode = code;
      }
      // NOTE(review): unsynchronized read-modify-write — can race with
      // tickPipelines, which also rewrites this same run file.
      writeFileSync(join(cwd, RUNS_DIR, `${runId}.json`), JSON.stringify(currentRun, null, 2));
    } catch { /* ignore */ }
    console.error(`[pipeline] Step "${stepDef.name}" exited (code: ${code}, run: ${runId})`);
  });

  child.stdin.end();
  child.unref();

  console.error(`[pipeline] Started step "${stepDef.name}" → pid ${child.pid} (run: ${runId})`);
  return child.pid;
}
|
|
217
|
+
|
|
218
|
+
/**
|
|
219
|
+
* Check if a process is alive.
|
|
220
|
+
* @param {number} pid
|
|
221
|
+
* @returns {boolean}
|
|
222
|
+
*/
|
|
223
|
+
function isAlive(pid) {
|
|
224
|
+
try { process.kill(pid, 0); return true; }
|
|
225
|
+
catch { return false; }
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
/**
 * Process pipeline runs — check triggers, advance steps.
 *
 * For each .agent/runs/*.json with status 'running': re-fires bounce_pending
 * steps, detects dead step processes (auto-success on exit 0/null when the
 * agent never signaled, failure otherwise), starts pending steps whose
 * trigger fired ('start' / on_complete / on_complete_all / on_file), resumes
 * waiting_bounce steps, and finalizes the run once every step is terminal.
 * Mutated run state is written back to disk.
 *
 * @returns {boolean} true if any pipeline is actively running
 */
function tickPipelines() {
  const runsDir = join(cwd, RUNS_DIR);
  if (!existsSync(runsDir)) return false;

  const pipelinesDir = join(cwd, PIPELINES_DIR);
  let hasActive = false;

  for (const file of readdirSync(runsDir).filter(f => f.endsWith('.json'))) {
    let run;
    // Corrupt/partial run files are skipped rather than crashing the daemon.
    try { run = JSON.parse(readFileSync(join(runsDir, file), 'utf-8')); }
    catch { continue; }

    if (run.status !== 'running') continue;
    hasActive = true;

    // Load pipeline definition; a run whose definition is missing is skipped
    // (it stays 'running' and will be retried next tick).
    let pipeline;
    try {
      pipeline = JSON.parse(readFileSync(join(pipelinesDir, `${run.pipeline}.json`), 'utf-8'));
    } catch { continue; }

    const runId = file.replace('.json', '');
    let modified = false;

    for (const stepDef of pipeline.steps) {
      const step = run.steps[stepDef.name];
      if (!step) continue;

      // ── Handle bounce_pending: re-run the step with the bounce reason ──
      if (step.status === 'bounce_pending') {
        step.status = 'running';
        step.startedAt = new Date().toISOString();
        step.pid = spawnStep(stepDef, run, runId, step.lastBounceReason);
        modified = true;
        continue;
      }

      // ── Handle running steps: check if process died ──
      if (step.status === 'running' && step.pid) {
        if (!isAlive(step.pid)) {
          // Process is dead — did agent signal completion (step.signaled)?
          if (!step.signaled) {
            // Auto-fallback: decide from the exit code recorded by spawnStep's
            // close handler (null = code not yet recorded / no code).
            if (step.exitCode === 0 || step.exitCode === null) {
              // Treat as success (agent forgot to signal)
              step.status = 'success';
              step.completedAt = new Date().toISOString();
              console.error(`[pipeline] Step "${stepDef.name}" auto-completed (pid dead, exit: ${step.exitCode})`);
            } else {
              // Non-zero exit → failed; optionally abort the whole run.
              step.status = 'failed';
              step.completedAt = new Date().toISOString();
              console.error(`[pipeline] Step "${stepDef.name}" failed (exit: ${step.exitCode})`);
              if (pipeline.onError === 'stop') {
                run.status = 'failed';
                run.completedAt = new Date().toISOString();
              }
            }
            modified = true;
          }
        }
        continue;
      }

      // ── Handle pending steps: check trigger ──
      if (step.status === 'pending') {
        let shouldStart = false;

        if (stepDef.trigger === 'start') {
          // Entry step — starts unconditionally.
          shouldStart = true;
        } else if (stepDef.trigger?.type === 'on_complete') {
          // Single dependency must have succeeded.
          const depStep = run.steps[stepDef.trigger.step];
          shouldStart = depStep?.status === 'success';
        } else if (stepDef.trigger?.type === 'on_complete_all') {
          // Fan-in: wait for ALL listed steps to complete
          const deps = stepDef.trigger.steps || [];
          shouldStart = deps.length > 0 && deps.every(
            name => run.steps[name]?.status === 'success',
          );
        } else if (stepDef.trigger?.type === 'on_file') {
          const filePath = join(run.cwd || cwd, stepDef.trigger.path);
          if (existsSync(filePath)) {
            // File exists — also require the producing process to be dead.
            // NOTE(review): assumes the producer is the immediately preceding
            // pipeline entry (index - 1); confirm that invariant holds.
            const depStepName = pipeline.steps[pipeline.steps.indexOf(stepDef) - 1]?.name;
            const depStep = depStepName ? run.steps[depStepName] : null;
            if (!depStep?.pid || !isAlive(depStep.pid)) {
              shouldStart = true;
            }
          }
        }

        // Guard on run.status again — an earlier step failure this tick may
        // have flipped the run to 'failed'.
        if (shouldStart && run.status === 'running') {
          step.status = 'running';
          step.startedAt = new Date().toISOString();
          step.pid = spawnStep(stepDef, run, runId);
          modified = true;
        }
      }

      // ── Handle waiting_bounce: restart when the bounced step succeeds ──
      if (step.status === 'waiting_bounce') {
        const depStepName = stepDef.trigger?.step;
        if (depStepName && run.steps[depStepName]?.status === 'success') {
          step.status = 'running';
          step.startedAt = new Date().toISOString();
          step.pid = spawnStep(stepDef, run, runId);
          modified = true;
        }
      }
    }

    // Finalize: run is done once every step reached a terminal status.
    const allDone = Object.values(run.steps).every(s =>
      s.status === 'success' || s.status === 'failed' || s.status === 'skipped' || s.status === 'cancelled',
    );
    if (allDone && run.status === 'running') {
      const hasFailed = Object.values(run.steps).some(s => s.status === 'failed');
      run.status = hasFailed ? 'failed' : 'success';
      run.completedAt = new Date().toISOString();
      modified = true;
      console.error(`[pipeline] Run ${runId} completed: ${run.status}`);
    }

    if (modified) {
      writeFileSync(join(runsDir, file), JSON.stringify(run, null, 2));
    }
  }

  return hasActive;
}
|
|
363
|
+
|
|
364
|
+
// ─── Main loop ──────────────────────────────────────────────
|
|
365
|
+
|
|
366
|
+
/**
 * One scheduler iteration: advance pipelines, fire cron schedules that match
 * the current minute, then re-arm itself via setTimeout (3s while a pipeline
 * is active, 30s when idle). Exits the daemon when there are no schedules
 * and no run files left.
 */
function tick() {
  const now = new Date();
  const schedules = readSchedules();
  const hasActivePipeline = tickPipelines();

  if (schedules.length === 0 && !hasActivePipeline) {
    // No work — check for leftover run files before exiting.
    // NOTE(review): pipelinesDir is computed but never used here — confirm
    // whether pipeline *definitions* were also meant to keep the daemon alive.
    const pipelinesDir = join(cwd, PIPELINES_DIR);
    const runsDir = join(cwd, RUNS_DIR);
    const hasRuns = existsSync(runsDir) && readdirSync(runsDir).some(f => f.endsWith('.json'));
    if (!hasRuns) {
      console.error('[scheduler] No schedules or active pipelines. Daemon exiting.');
      releaseLock();
      process.exit(0);
    }
  }

  for (const schedule of schedules) {
    // Skip malformed entries.
    if (!schedule.cron || !schedule.prompt) continue;

    // Check if cron matches current minute
    if (!matchesCron(schedule.cron, now)) continue;

    // Deduplicate: don't run if already ran this minute (lastRun is stamped
    // at spawn time by executeSchedule).
    if (schedule.lastRun) {
      const lastRun = new Date(schedule.lastRun);
      if (lastRun.getFullYear() === now.getFullYear() &&
        lastRun.getMonth() === now.getMonth() &&
        lastRun.getDate() === now.getDate() &&
        lastRun.getHours() === now.getHours() &&
        lastRun.getMinutes() === now.getMinutes()) {
        continue; // Already ran this minute
      }
    }

    // Atomic execution lock (prevents dual-daemon runs): 'wx' write fails if
    // the file already exists, so only one daemon wins per (id, ms) key.
    // NOTE(review): the key includes now.getTime() in milliseconds, so two
    // daemons ticking in different milliseconds of the same minute would each
    // create distinct lock files; also lock files are never cleaned up.
    const lockFile = join(cwd, '.agent', 'locks', `${schedule.id}_${now.getTime()}.lock`);
    try {
      mkdirSync(dirname(lockFile), { recursive: true });
      writeFileSync(lockFile, String(process.pid), { flag: 'wx' }); // wx = fail if exists
    } catch {
      continue; // Another daemon got the lock
    }

    executeSchedule(schedule);
  }

  // Adaptive polling: fast when pipeline active, slow otherwise
  const nextTickMs = hasActivePipeline ? 3000 : 30000;
  setTimeout(tick, nextTickMs);
}
|
|
417
|
+
|
|
418
|
+
// ─── Startup ────────────────────────────────────────────────
|
|
419
|
+
|
|
420
|
+
// Acquire the single-instance PID lock (exits immediately if a live daemon
// already owns it).
acquireLock();

// Remove our PID file on graceful termination so the next daemon can start.
process.on('SIGINT', () => { releaseLock(); process.exit(0); });
process.on('SIGTERM', () => { releaseLock(); process.exit(0); });

// All daemon logging goes to stderr (stdout is unused).
console.error(`[scheduler] Daemon started (pid ${process.pid}, cwd: ${cwd})`);
console.error(`[scheduler] Adaptive polling: 3s active / 30s idle`);

// Kick off the self-rescheduling poll loop (tick re-arms itself via setTimeout).
tick();
|
|
430
|
+
|