agent-pool-mcp 1.0.1 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +68 -6
- package/index.js +1 -1
- package/package.json +3 -1
- package/src/scheduler/cron.js +84 -0
- package/src/scheduler/daemon.js +430 -0
- package/src/scheduler/pipeline.js +354 -0
- package/src/scheduler/scheduler.js +207 -0
- package/src/server.js +269 -1
- package/src/tool-definitions.js +168 -0
package/README.md
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
# agent-pool-mcp
|
|
2
2
|
|
|
3
|
-
**MCP server for multi-agent orchestration** — parallel task delegation and cross-model peer review via [Gemini CLI](https://github.com/google-gemini/gemini-cli).
|
|
3
|
+
**MCP server for multi-agent orchestration** — parallel task delegation, sequential pipelines, cron scheduling, and cross-model peer review via [Gemini CLI](https://github.com/google-gemini/gemini-cli).
|
|
4
4
|
|
|
5
5
|
> Developed by [RND-PRO](https://rnd-pro.com)
|
|
6
6
|
|
|
@@ -38,6 +38,60 @@ When the primary agent and Gemini workers are **different foundation models** (e
|
|
|
38
38
|
- **`get_task_result`** — Poll task status, retrieve results, and see live progress (last 200 tool/message events).
|
|
39
39
|
- **`cancel_task`** — Kill a running task and its entire process group immediately.
|
|
40
40
|
|
|
41
|
+
### 🔗 Pipelines — Sequential Task Chains
|
|
42
|
+
Define multi-step workflows where agents execute sequentially, with automatic handoff:
|
|
43
|
+
|
|
44
|
+
```
|
|
45
|
+
┌─ frontend ─┐
|
|
46
|
+
research ─┤ ├── deploy
|
|
47
|
+
└─ backend ─┘
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
- **`create_pipeline`** — Define a pipeline with named steps, triggers, and timeouts.
|
|
51
|
+
- **`run_pipeline`** — Start executing a pipeline. A detached daemon manages the lifecycle.
|
|
52
|
+
- **`list_pipelines`** — See all definitions, active runs, and recent completions.
|
|
53
|
+
- **`get_pipeline_status`** — Step-by-step status with emoji indicators.
|
|
54
|
+
- **`cancel_pipeline`** — Stop a running pipeline and kill active step processes.
|
|
55
|
+
|
|
56
|
+
**Agent Signals** (called BY agents running inside pipeline steps):
|
|
57
|
+
- **`signal_step_complete`** — Mark the current step as done. Accepts optional output and `run_id`.
|
|
58
|
+
- **`bounce_back`** — Return task to a previous step with feedback (e.g. "data incomplete"). Supports `maxBounces` limit.
|
|
59
|
+
|
|
60
|
+
**Triggers:**
|
|
61
|
+
|
|
62
|
+
| Trigger | Description |
|
|
63
|
+
|---------|-------------|
|
|
64
|
+
| `on_complete` | Start when a specific step succeeds |
|
|
65
|
+
| `on_complete_all` | Fan-in: start when ALL listed steps succeed |
|
|
66
|
+
| `on_file` | Start when a file appears and the producing process exits |
|
|
67
|
+
| Auto-fallback | Process death without signal → auto-complete/fail |
|
|
68
|
+
|
|
69
|
+
**Example — 3-step pipeline:**
|
|
70
|
+
```javascript
|
|
71
|
+
// Agent creates the pipeline
|
|
72
|
+
create_pipeline({
|
|
73
|
+
name: "article-workflow",
|
|
74
|
+
steps: [
|
|
75
|
+
{ name: "research", prompt: "Research the topic and write notes to research.md" },
|
|
76
|
+
{ name: "draft", prompt: "Read research.md and write article draft" },
|
|
77
|
+
{ name: "review", prompt: "Review the draft for accuracy and style" }
|
|
78
|
+
]
|
|
79
|
+
})
|
|
80
|
+
|
|
81
|
+
// Agent starts execution — daemon handles the rest
|
|
82
|
+
run_pipeline({ pipeline_id: "article-workflow" })
|
|
83
|
+
```
|
|
84
|
+
|
|
85
|
+
### ⏰ Cron Scheduler
|
|
86
|
+
Schedule agents to run automatically on a cron schedule:
|
|
87
|
+
|
|
88
|
+
- **`schedule_task`** — Schedule a Gemini CLI agent with cron expression (e.g. `0 9 * * MON-FRI`).
|
|
89
|
+
- **`list_schedules`** — See all schedules with next run times and daemon status.
|
|
90
|
+
- **`cancel_schedule`** — Remove a schedule. Daemon auto-exits when no schedules remain.
|
|
91
|
+
- **`get_scheduled_results`** — Retrieve results from past scheduled executions.
|
|
92
|
+
|
|
93
|
+
The scheduler runs as a **detached daemon** that survives IDE/CLI restarts. It uses atomic file locks to prevent duplicate execution when multiple clients are connected.
|
|
94
|
+
|
|
41
95
|
### 📋 3-Tier Skill System
|
|
42
96
|
Skills are Markdown files with YAML frontmatter that extend agent behavior. Agent-pool manages skills in three tiers:
|
|
43
97
|
1. **Project**: `.gemini/skills/` (local to repo, takes precedence).
|
|
@@ -210,11 +264,16 @@ src/
|
|
|
210
264
|
│ ├── consult.js ← Peer review via Gemini CLI
|
|
211
265
|
│ ├── results.js ← Task store + result formatting (TTL cleanup, ring buffer)
|
|
212
266
|
│ └── skills.js ← 3-tier skill management (project/global/built-in)
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
267
|
+
├── runner/
|
|
268
|
+
│ ├── config.js ← Runner config loader (local/SSH)
|
|
269
|
+
│ ├── gemini-runner.js ← Process spawning (streaming JSON, depth tracking)
|
|
270
|
+
│ ├── process-manager.js ← PID tracking, system load awareness, group kill
|
|
271
|
+
│ └── ssh.js ← Shell escaping, remote PID tracking
|
|
272
|
+
└── scheduler/
|
|
273
|
+
├── cron.js ← Minimal cron expression parser (zero-dependency)
|
|
274
|
+
├── daemon.js ← Detached daemon: schedule ticks + pipeline lifecycle
|
|
275
|
+
├── pipeline.js ← Pipeline CRUD, run state, signals, bounce-back
|
|
276
|
+
└── scheduler.js ← Schedule management + daemon spawning
|
|
218
277
|
```
|
|
219
278
|
|
|
220
279
|
**Process management:**
|
|
@@ -222,7 +281,10 @@ src/
|
|
|
222
281
|
- **TTL Cleanup**: Completed task results are purged from memory after 10 minutes.
|
|
223
282
|
- **Live Events**: Progress polling uses a ring buffer to show the latest activity without overwhelming context.
|
|
224
283
|
- **Depth Tracking**: Nested orchestration support with optional `AGENT_POOL_MAX_DEPTH` limit.
|
|
284
|
+
- **Adaptive Polling**: Pipeline daemon uses 3s intervals when active, 30s when idle.
|
|
285
|
+
- **File-Based Communication**: Pipeline agents communicate through `.agent/runs/` JSON files — each Gemini process has its own MCP server instance but shares state via filesystem.
|
|
225
286
|
|
|
226
287
|
## License
|
|
227
288
|
|
|
228
289
|
MIT
|
|
290
|
+
|
package/index.js
CHANGED
|
@@ -33,6 +33,6 @@ async function startServer() {
|
|
|
33
33
|
const server = createServer();
|
|
34
34
|
const transport = new StdioServerTransport();
|
|
35
35
|
await server.connect(transport);
|
|
36
|
-
console.error('[agent-pool] MCP server v1.
|
|
36
|
+
console.error('[agent-pool] MCP server v1.2.1 started');
|
|
37
37
|
}
|
|
38
38
|
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "agent-pool-mcp",
|
|
3
|
-
"version": "1.
|
|
3
|
+
"version": "1.2.1",
|
|
4
4
|
"type": "module",
|
|
5
5
|
"description": "MCP Server for multi-agent task delegation and orchestration via Gemini CLI",
|
|
6
6
|
"main": "index.js",
|
|
@@ -27,6 +27,8 @@
|
|
|
27
27
|
"multi-agent",
|
|
28
28
|
"delegation",
|
|
29
29
|
"orchestration",
|
|
30
|
+
"scheduler",
|
|
31
|
+
"cron",
|
|
30
32
|
"ai"
|
|
31
33
|
],
|
|
32
34
|
"author": "Vladislav Matiyasevich",
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Minimal cron expression parser.
|
|
3
|
+
* Supports: star, star/N (step), N, N-M (range), N,M (list), MON-FRI for weekdays (0=Sun, 6=Sat).
|
|
4
|
+
* 5 fields: minute hour day month weekday
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
// Month/weekday name aliases for cron fields (cron months are 1-based;
// weekdays are 0=Sun .. 6=Sat). Frozen: these are shared module-level
// constants and must never be mutated by callers.
const MONTHS = Object.freeze({ JAN: 1, FEB: 2, MAR: 3, APR: 4, MAY: 5, JUN: 6, JUL: 7, AUG: 8, SEP: 9, OCT: 10, NOV: 11, DEC: 12 });
const DAYS = Object.freeze({ SUN: 0, MON: 1, TUE: 2, WED: 3, THU: 4, FRI: 5, SAT: 6 });
|
|
9
|
+
|
|
10
|
+
/**
 * Check whether `target` satisfies a single cron field expression.
 *
 * Supported syntax per comma-separated part: `*`, `N`, `N-M`,
 * `base/step` (where base is `*`, `N`, or `N-M`), and name aliases
 * (e.g. MON, JAN) resolved through `aliases`.
 *
 * Fix vs. previous version: `N/step` now follows standard cron semantics
 * ("start at N, every step up to max") instead of matching only N — the
 * `max` parameter was previously accepted but never used.
 *
 * @param {string} value - One cron field (e.g. "*\/15", "MON-FRI", "1,3,5")
 * @param {number} target - The current date component to test
 * @param {number} min - Smallest legal value for this field
 * @param {number} max - Largest legal value for this field
 * @param {Object<string, number>} aliases - Name → number map (may be empty)
 * @returns {boolean} True if `target` matches `value`
 */
function parseField(value, target, min, max, aliases) {
  if (value === '*') return true;
  for (const part of value.split(',')) {
    let range = part;
    let step = 1;
    let hasStep = false;
    if (part.includes('/')) {
      const [base, stepStr] = part.split('/');
      range = base;
      step = Number.parseInt(stepStr, 10);
      hasStep = true;
      // A non-numeric or non-positive step makes the whole field invalid.
      if (Number.isNaN(step) || step <= 0) return false;
    }
    let start;
    let end;
    if (range === '*') {
      start = min;
      end = max;
    } else if (range.includes('-')) {
      const [lo, hi] = range.split('-');
      start = aliases[lo.toUpperCase()] ?? Number.parseInt(lo, 10);
      end = aliases[hi.toUpperCase()] ?? Number.parseInt(hi, 10);
    } else {
      start = aliases[range.toUpperCase()] ?? Number.parseInt(range, 10);
      // Standard cron: a bare value with a step ("5/15") means
      // "starting at 5, every 15 up to the field maximum".
      end = hasStep ? max : start;
    }
    // Unparseable part — skip it, but keep trying the remaining parts.
    if (Number.isNaN(start) || Number.isNaN(end)) continue;
    if (target >= start && target <= end && (target - start) % step === 0) {
      return true;
    }
  }
  return false;
}
|
|
42
|
+
|
|
43
|
+
/**
 * Checks if a given Date matches the cron expression.
 * All five fields (minute, hour, day-of-month, month, weekday) must match.
 * @param {string} cronExpr - 5-field cron expression
 * @param {Date} date - Date to check (minute resolution)
 * @returns {boolean} True if matches, false otherwise
 */
export function matchesCron(cronExpr, date) {
  const validDate = date instanceof Date && !isNaN(date.getTime());
  if (typeof cronExpr !== 'string' || !validDate) return false;

  const fields = cronExpr.trim().split(/\s+/);
  if (fields.length !== 5) return false;

  // [expression, current value, min, max, name aliases] per field.
  const checks = [
    [fields[0], date.getMinutes(), 0, 59, {}],
    [fields[1], date.getHours(), 0, 23, {}],
    [fields[2], date.getDate(), 1, 31, {}],
    [fields[3], date.getMonth() + 1, 1, 12, MONTHS],
    [fields[4], date.getDay(), 0, 6, DAYS],
  ];

  try {
    // .every short-circuits exactly like a chained &&.
    return checks.every(([expr, val, lo, hi, names]) => parseField(expr, val, lo, hi, names));
  } catch {
    return false;
  }
}
|
|
63
|
+
|
|
64
|
+
/**
 * Finds the next Date (minute resolution) that matches the cron expression.
 * Scans forward minute-by-minute, giving up after roughly five years.
 * @param {string} cronExpr - 5-field cron expression
 * @param {Date} fromDate - Starting date (exclusive; search begins at the next whole minute)
 * @returns {Date|null} Next matching date or null if invalid / none found
 */
export function nextCronRun(cronExpr, fromDate) {
  if (!(fromDate instanceof Date) || isNaN(fromDate.getTime())) return null;
  if (typeof cronExpr !== 'string' || cronExpr.trim().split(/\s+/).length !== 5) return null;

  // Start the scan at the first whole minute strictly after fromDate.
  const candidate = new Date(fromDate.getTime());
  candidate.setSeconds(0, 0);
  candidate.setMinutes(candidate.getMinutes() + 1);

  // Cap the scan so a never-matching expression cannot loop forever.
  const yearCap = candidate.getFullYear() + 5;
  while (candidate.getFullYear() < yearCap) {
    if (matchesCron(cronExpr, candidate)) {
      return new Date(candidate.getTime());
    }
    candidate.setMinutes(candidate.getMinutes() + 1);
  }
  return null;
}
|
|
@@ -0,0 +1,430 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* Scheduler Daemon — standalone detached process.
|
|
5
|
+
* Reads schedule.json, spawns Gemini CLI agents on cron schedule.
|
|
6
|
+
* Survives parent process death (MCP server, IDE, CLI).
|
|
7
|
+
*
|
|
8
|
+
* Usage: spawned by MCP server with detached:true + unref()
|
|
9
|
+
* NOT meant to be run manually.
|
|
10
|
+
*
|
|
11
|
+
* @module agent-pool/scheduler/daemon
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
import { readFileSync, writeFileSync, existsSync, mkdirSync, unlinkSync } from 'node:fs';
|
|
15
|
+
import { spawn } from 'node:child_process';
|
|
16
|
+
import { join, dirname } from 'node:path';
|
|
17
|
+
import { matchesCron } from './cron.js';
|
|
18
|
+
|
|
19
|
+
const POLL_INTERVAL_MS = 30_000; // Check schedules every 30 seconds
|
|
20
|
+
const PID_FILE = '.agent/scheduler.pid';
|
|
21
|
+
const SCHEDULE_FILE = '.agent/schedule.json';
|
|
22
|
+
const RESULTS_DIR = '.agent/scheduled-results';
|
|
23
|
+
|
|
24
|
+
/** @type {string} */
|
|
25
|
+
const cwd = process.argv[2] || process.cwd();
|
|
26
|
+
|
|
27
|
+
// ─── PID file management ────────────────────────────────────
|
|
28
|
+
|
|
29
|
+
/**
 * Write PID file. Exit if another daemon is already running.
 *
 * Probes the PID stored in `.agent/scheduler.pid` with signal 0: if that
 * process is alive, this daemon exits (code 0). A stale or unreadable PID
 * file is taken over. Side effects: creates `.agent/` if needed, writes
 * our own PID; may call process.exit.
 *
 * NOTE(review): the existsSync → writeFileSync sequence is not atomic —
 * two daemons starting at the same instant could both pass the check.
 * The per-execution `wx` lock files in tick() are the actual duplicate-run
 * guard; confirm this is acceptable before tightening.
 */
function acquireLock() {
  const pidPath = join(cwd, PID_FILE);
  if (existsSync(pidPath)) {
    try {
      // Fix: always pass a radix to parseInt.
      const existingPid = Number.parseInt(readFileSync(pidPath, 'utf-8').trim(), 10);
      // Check if process is still alive (signal 0 sends nothing; it throws
      // if the PID is dead — or NaN — which falls through to takeover).
      process.kill(existingPid, 0);
      // Process exists — exit, another daemon is running
      console.error(`[scheduler] Another daemon running (pid ${existingPid}). Exiting.`);
      process.exit(0);
    } catch {
      // Process dead — stale PID file, we can take over
    }
  }
  mkdirSync(dirname(pidPath), { recursive: true });
  writeFileSync(pidPath, String(process.pid));
}
|
|
49
|
+
|
|
50
|
+
/**
 * Remove PID file on exit — but only when it still holds OUR pid,
 * so we never delete a lock that a newer daemon has taken over.
 */
function releaseLock() {
  const pidPath = join(cwd, PID_FILE);
  try {
    if (!existsSync(pidPath)) return;
    const storedPid = readFileSync(pidPath, 'utf-8').trim();
    if (storedPid !== String(process.pid)) return;
    unlinkSync(pidPath);
  } catch { /* ignore */ }
}
|
|
64
|
+
|
|
65
|
+
// ─── Schedule management ────────────────────────────────────
|
|
66
|
+
|
|
67
|
+
/**
 * Read schedules from JSON file.
 * Returns an empty array when the file is missing, unparseable, or —
 * fix vs. previous version — does not contain an array (callers iterate
 * the result with for...of / findIndex and would break on other shapes).
 * @returns {Array<{id: string, prompt: string, cron: string, cwd: string, skill?: string, approvalMode?: string, catchup?: boolean, lastRun?: string, createdAt: string}>}
 */
function readSchedules() {
  const filePath = join(cwd, SCHEDULE_FILE);
  if (!existsSync(filePath)) return [];
  try {
    const parsed = JSON.parse(readFileSync(filePath, 'utf-8'));
    return Array.isArray(parsed) ? parsed : [];
  } catch {
    return [];
  }
}
|
|
80
|
+
|
|
81
|
+
/**
 * Update schedule in JSON file (e.g., set lastRun).
 * No-op when the schedule id is unknown; otherwise merges `updates`
 * into the matching entry and rewrites the whole schedule file.
 * @param {string} scheduleId
 * @param {object} updates
 */
function updateSchedule(scheduleId, updates) {
  const schedules = readSchedules();
  const target = schedules.find((s) => s.id === scheduleId);
  if (!target) return;
  Object.assign(target, updates);
  writeFileSync(join(cwd, SCHEDULE_FILE), JSON.stringify(schedules, null, 2));
}
|
|
93
|
+
|
|
94
|
+
// ─── Gemini CLI execution ───────────────────────────────────
|
|
95
|
+
|
|
96
|
+
/**
 * Run a Gemini CLI task and save the result.
 *
 * Spawns a detached `gemini` process for the schedule's prompt, captures
 * its stream-json output, and on close writes a summary JSON file to
 * `.agent/scheduled-results/<id>_<timestamp>.json`. Marks the schedule's
 * `lastRun` immediately on spawn (not on completion) so the per-minute
 * dedupe in tick() works even while the task is still running.
 *
 * Fire-and-forget: the child is unref()'d, so the daemon's event loop is
 * not kept alive by it; results are only persisted if the daemon is still
 * alive when the child closes.
 *
 * @param {object} schedule - Entry from schedule.json (id, prompt, cron, cwd?, skill?, approvalMode?)
 */
function executeSchedule(schedule) {
  const timestamp = Date.now();
  const resultFile = join(cwd, RESULTS_DIR, `${schedule.id}_${timestamp}.json`);
  mkdirSync(join(cwd, RESULTS_DIR), { recursive: true });

  const args = [
    '-p', schedule.prompt,
    '--output-format', 'stream-json',
    '--approval-mode', schedule.approvalMode || 'yolo',
  ];
  if (schedule.skill) {
    // Skills are pre-provisioned, just pass as part of prompt
    // (args[1] is the value of the '-p' flag).
    args[1] = `Activate skill "${schedule.skill}" first.\n\n${schedule.prompt}`;
  }

  // TERM=dumb / CI=1 suppress interactive terminal behavior in the CLI.
  const child = spawn('gemini', args, {
    cwd: schedule.cwd || cwd,
    env: { ...process.env, TERM: 'dumb', CI: '1' },
    stdio: ['pipe', 'pipe', 'pipe'],
    detached: true,
  });

  let stdout = '';
  let stderr = '';
  child.stdout.on('data', (d) => { stdout += d.toString(); });
  child.stderr.on('data', (d) => { stderr += d.toString(); });

  child.on('close', (code) => {
    // Parse stream-json events for the final response
    // (one JSON object per line; unparseable lines are dropped).
    const events = stdout.split('\n').filter(Boolean).map((line) => {
      try { return JSON.parse(line); } catch { return null; }
    }).filter(Boolean);

    // Prefer the explicit 'result' event; fall back to concatenated
    // assistant messages when the CLI did not emit one.
    const messages = events.filter((e) => e.type === 'message' && e.role === 'assistant');
    const resultEvent = events.find((e) => e.type === 'result');
    const response = resultEvent?.response || messages.map((m) => m.content || m.text || '').join('\n');

    const result = {
      scheduleId: schedule.id,
      prompt: schedule.prompt,
      cron: schedule.cron,
      executedAt: new Date(timestamp).toISOString(),
      completedAt: new Date().toISOString(),
      exitCode: code,
      // Capped at 5000 chars to keep result files small.
      response: response.substring(0, 5000),
      totalEvents: events.length,
    };

    try {
      writeFileSync(resultFile, JSON.stringify(result, null, 2));
    } catch { /* ignore write errors */ }

    console.error(`[scheduler] Completed: ${schedule.id} (exit: ${code})`);
  });

  // Close stdin so the CLI doesn't wait for terminal input; unref so the
  // daemon can exit independently of the child.
  child.stdin.end();
  child.unref();

  // Record lastRun at spawn time — this is the dedupe timestamp.
  updateSchedule(schedule.id, { lastRun: new Date().toISOString() });
  console.error(`[scheduler] Started: ${schedule.id} → gemini pid ${child.pid}`);
}
|
|
161
|
+
|
|
162
|
+
// ─── Pipeline tick ──────────────────────────────────────────
|
|
163
|
+
|
|
164
|
+
import { readdirSync } from 'node:fs';
|
|
165
|
+
|
|
166
|
+
const PIPELINES_DIR = '.agent/pipelines';
|
|
167
|
+
const RUNS_DIR = '.agent/runs';
|
|
168
|
+
|
|
169
|
+
/**
 * Spawn a Gemini CLI agent for a pipeline step.
 *
 * Wraps the step prompt with pipeline context (pipeline name, step name,
 * run id) plus an instruction to call signal_step_complete when done.
 * On a bounce-back, the previous rejection reason is appended so the
 * re-run can improve on the last attempt.
 *
 * The child is detached + unref()'d; its exit code is persisted to the
 * run-state JSON by the 'close' handler (best-effort — only while this
 * daemon is still alive), where tickPipelines() later reads it for the
 * auto-complete/fail fallback.
 *
 * @param {object} stepDef - Step definition from pipeline
 * @param {object} run - Current run state
 * @param {string} runId
 * @param {string} [bounceReason] - If bouncing back, the reason
 * @returns {number} child PID
 */
function spawnStep(stepDef, run, runId, bounceReason) {
  let prompt = stepDef.prompt;
  if (bounceReason) {
    prompt = `${stepDef.prompt}\n\n⚠️ BOUNCE BACK: предыдущая попытка была отклонена следующим шагом.\nПричина: ${bounceReason}\nДополни и улучши результат.`;
  }

  // Inject pipeline context
  prompt = `[Pipeline: ${run.pipelineName}, Step: ${stepDef.name}, Run: ${runId}]\n\nTask:\n${prompt}\n\nWhen finished, call signal_step_complete with step_name "${stepDef.name}" and run_id "${runId}".`;

  const args = [
    '-p', prompt,
    '--output-format', 'stream-json',
    '--approval-mode', stepDef.approvalMode || 'yolo',
  ];

  // TERM=dumb / CI=1 suppress interactive terminal behavior in the CLI.
  const child = spawn('gemini', args, {
    cwd: run.cwd || cwd,
    env: { ...process.env, TERM: 'dumb', CI: '1' },
    stdio: ['pipe', 'pipe', 'pipe'],
    detached: true,
  });

  child.on('close', (code) => {
    // Update step exit code in run state
    // (re-read from disk — other writers may have changed it since spawn).
    try {
      const currentRun = JSON.parse(readFileSync(join(cwd, RUNS_DIR, `${runId}.json`), 'utf-8'));
      if (currentRun.steps[stepDef.name]) {
        currentRun.steps[stepDef.name].exitCode = code;
      }
      writeFileSync(join(cwd, RUNS_DIR, `${runId}.json`), JSON.stringify(currentRun, null, 2));
    } catch { /* ignore */ }
    console.error(`[pipeline] Step "${stepDef.name}" exited (code: ${code}, run: ${runId})`);
  });

  // Close stdin so the CLI doesn't wait for input; unref so the daemon's
  // lifetime is independent of the step process.
  child.stdin.end();
  child.unref();

  console.error(`[pipeline] Started step "${stepDef.name}" → pid ${child.pid} (run: ${runId})`);
  return child.pid;
}
|
|
217
|
+
|
|
218
|
+
/**
 * Check if a process is alive.
 * Uses the signal-0 probe: kill(pid, 0) sends nothing but throws
 * when the target process does not exist.
 * @param {number} pid
 * @returns {boolean}
 */
function isAlive(pid) {
  try {
    process.kill(pid, 0);
    return true;
  } catch {
    return false;
  }
}
|
|
227
|
+
|
|
228
|
+
/**
 * Process pipeline runs — check triggers, advance steps.
 *
 * For every `.agent/runs/<runId>.json` with status 'running', walks the
 * pipeline's step definitions and performs one state transition per step:
 *   - bounce_pending  → respawn the step with the last bounce reason
 *   - running         → if the process died without an agent signal,
 *                       auto-complete (exit 0/null) or fail; a failure may
 *                       fail the whole run when pipeline.onError === 'stop'
 *   - pending         → start when its trigger fires ('start',
 *                       on_complete, on_complete_all fan-in, on_file)
 *   - waiting_bounce  → restart once the bounced-to step succeeds
 * When every step reaches a terminal status the run itself is marked
 * success/failed. Modified run state is written back to disk.
 *
 * @returns {boolean} true if any pipeline is actively running
 */
function tickPipelines() {
  const runsDir = join(cwd, RUNS_DIR);
  if (!existsSync(runsDir)) return false;

  const pipelinesDir = join(cwd, PIPELINES_DIR);
  let hasActive = false;

  for (const file of readdirSync(runsDir).filter(f => f.endsWith('.json'))) {
    let run;
    try { run = JSON.parse(readFileSync(join(runsDir, file), 'utf-8')); }
    catch { continue; }

    if (run.status !== 'running') continue;
    hasActive = true;

    // Load pipeline definition
    let pipeline;
    try {
      pipeline = JSON.parse(readFileSync(join(pipelinesDir, `${run.pipeline}.json`), 'utf-8'));
    } catch { continue; }

    const runId = file.replace('.json', '');
    let modified = false;

    for (const stepDef of pipeline.steps) {
      const step = run.steps[stepDef.name];
      if (!step) continue;

      // ── Handle bounce_pending: re-run the step ──
      if (step.status === 'bounce_pending') {
        step.status = 'running';
        step.startedAt = new Date().toISOString();
        step.pid = spawnStep(stepDef, run, runId, step.lastBounceReason);
        modified = true;
        continue;
      }

      // ── Handle running steps: check if process died ──
      if (step.status === 'running' && step.pid) {
        if (!isAlive(step.pid)) {
          // Process is dead — did agent signal?
          if (!step.signaled) {
            // Auto-fallback: check exit code
            // (null = close handler never recorded one, e.g. daemon restarted).
            if (step.exitCode === 0 || step.exitCode === null) {
              // Treat as success (agent forgot to signal)
              step.status = 'success';
              step.completedAt = new Date().toISOString();
              console.error(`[pipeline] Step "${stepDef.name}" auto-completed (pid dead, exit: ${step.exitCode})`);
            } else {
              // Failed
              step.status = 'failed';
              step.completedAt = new Date().toISOString();
              console.error(`[pipeline] Step "${stepDef.name}" failed (exit: ${step.exitCode})`);
              if (pipeline.onError === 'stop') {
                run.status = 'failed';
                run.completedAt = new Date().toISOString();
              }
            }
            modified = true;
          }
        }
        continue;
      }

      // ── Handle pending steps: check trigger ──
      if (step.status === 'pending') {
        let shouldStart = false;

        if (stepDef.trigger === 'start') {
          // First step — always start
          shouldStart = true;
        } else if (stepDef.trigger?.type === 'on_complete') {
          const depStep = run.steps[stepDef.trigger.step];
          shouldStart = depStep?.status === 'success';
        } else if (stepDef.trigger?.type === 'on_complete_all') {
          // Fan-in: wait for ALL listed steps to complete
          const deps = stepDef.trigger.steps || [];
          shouldStart = deps.length > 0 && deps.every(
            name => run.steps[name]?.status === 'success',
          );
        } else if (stepDef.trigger?.type === 'on_file') {
          const filePath = join(run.cwd || cwd, stepDef.trigger.path);
          if (existsSync(filePath)) {
            // File exists — check if producing process is dead
            // (assumes the producer is the previous step in definition order).
            const depStepName = pipeline.steps[pipeline.steps.indexOf(stepDef) - 1]?.name;
            const depStep = depStepName ? run.steps[depStepName] : null;
            if (!depStep?.pid || !isAlive(depStep.pid)) {
              shouldStart = true;
            }
          }
        }

        // Guard on run.status: an earlier step may have failed the run
        // within this same pass (onError === 'stop').
        if (shouldStart && run.status === 'running') {
          step.status = 'running';
          step.startedAt = new Date().toISOString();
          step.pid = spawnStep(stepDef, run, runId);
          modified = true;
        }
      }

      // ── Handle waiting_bounce: restart when bounced step completes ──
      if (step.status === 'waiting_bounce') {
        const depStepName = stepDef.trigger?.step;
        if (depStepName && run.steps[depStepName]?.status === 'success') {
          step.status = 'running';
          step.startedAt = new Date().toISOString();
          step.pid = spawnStep(stepDef, run, runId);
          modified = true;
        }
      }
    }

    // Check if all steps are done
    const allDone = Object.values(run.steps).every(s =>
      s.status === 'success' || s.status === 'failed' || s.status === 'skipped' || s.status === 'cancelled',
    );
    if (allDone && run.status === 'running') {
      const hasFailed = Object.values(run.steps).some(s => s.status === 'failed');
      run.status = hasFailed ? 'failed' : 'success';
      run.completedAt = new Date().toISOString();
      modified = true;
      console.error(`[pipeline] Run ${runId} completed: ${run.status}`);
    }

    if (modified) {
      writeFileSync(join(runsDir, file), JSON.stringify(run, null, 2));
    }
  }

  return hasActive;
}
|
|
363
|
+
|
|
364
|
+
// ─── Main loop ──────────────────────────────────────────────
|
|
365
|
+
|
|
366
|
+
/**
 * Main daemon loop (self-rescheduling via setTimeout, no overlap).
 *
 * Each pass: advance pipeline runs, exit if there is no work left at all,
 * fire schedules whose cron matches the current minute (deduped per minute
 * via lastRun, and cross-daemon via an atomic 'wx' lock file), then re-arm
 * with adaptive timing — 3s while a pipeline is active, 30s when idle.
 *
 * Fixes vs. previous version:
 *  - removed the unused `pipelinesDir` local;
 *  - the cross-daemon lock filename previously embedded now.getTime() in
 *    milliseconds, so two daemons almost never produced the same name and
 *    the lock prevented nothing — it now uses a minute-floored stamp so
 *    concurrent daemons genuinely collide on one lock per minute.
 */
function tick() {
  const now = new Date();
  const schedules = readSchedules();
  const hasActivePipeline = tickPipelines();

  if (schedules.length === 0 && !hasActivePipeline) {
    // No schedules and nothing running — but keep the daemon alive while
    // any run files remain on disk (a pipeline may be about to start).
    const runsDir = join(cwd, RUNS_DIR);
    const hasRuns = existsSync(runsDir) && readdirSync(runsDir).some(f => f.endsWith('.json'));
    if (!hasRuns) {
      console.error('[scheduler] No schedules or active pipelines. Daemon exiting.');
      releaseLock();
      process.exit(0);
    }
  }

  for (const schedule of schedules) {
    if (!schedule.cron || !schedule.prompt) continue;

    // Check if cron matches current minute
    if (!matchesCron(schedule.cron, now)) continue;

    // Deduplicate: don't run if already ran this minute
    if (schedule.lastRun) {
      const lastRun = new Date(schedule.lastRun);
      const sameMinute =
        lastRun.getFullYear() === now.getFullYear() &&
        lastRun.getMonth() === now.getMonth() &&
        lastRun.getDate() === now.getDate() &&
        lastRun.getHours() === now.getHours() &&
        lastRun.getMinutes() === now.getMinutes();
      if (sameMinute) continue; // Already ran this minute
    }

    // Atomic execution lock (prevents dual-daemon runs).
    // Minute-floored stamp: all daemons compute the same name for the
    // same schedule+minute, so exactly one wins the 'wx' create.
    const minuteStamp = Math.floor(now.getTime() / 60000);
    const lockFile = join(cwd, '.agent', 'locks', `${schedule.id}_${minuteStamp}.lock`);
    try {
      mkdirSync(dirname(lockFile), { recursive: true });
      writeFileSync(lockFile, String(process.pid), { flag: 'wx' }); // wx = fail if exists
    } catch {
      continue; // Another daemon got the lock
    }

    executeSchedule(schedule);
  }

  // Adaptive polling: fast when pipeline active, slow otherwise
  const nextTickMs = hasActivePipeline ? 3000 : 30000;
  setTimeout(tick, nextTickMs);
}
|
|
417
|
+
|
|
418
|
+
// ─── Startup ────────────────────────────────────────────────
|
|
419
|
+
|
|
420
|
+
// Acquire the singleton PID-file lock; exits immediately if another
// daemon already owns it.
acquireLock();

// Clean up the PID file on termination signals so the next daemon
// doesn't see a stale lock. (SIGKILL cannot be caught — acquireLock's
// liveness probe covers that case.)
process.on('SIGINT', () => { releaseLock(); process.exit(0); });
process.on('SIGTERM', () => { releaseLock(); process.exit(0); });

// All daemon logging goes to stderr — stdout stays clean.
console.error(`[scheduler] Daemon started (pid ${process.pid}, cwd: ${cwd})`);
console.error(`[scheduler] Adaptive polling: 3s active / 30s idle`);

// Start the loop
tick();
|
|
430
|
+
|