@aion0/forge 0.2.1 → 0.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,514 @@
1
+ /**
2
+ * Pipeline Engine — DAG-based workflow orchestration on top of the Task system.
3
+ *
4
+ * Workflow YAML → Pipeline instance → Nodes executed as Tasks
5
+ * Supports: dependencies, output passing, conditional routing, parallel execution, notifications.
6
+ */
7
+
8
import { randomUUID } from 'node:crypto';
import { existsSync, mkdirSync, readdirSync, readFileSync, unlinkSync, writeFileSync } from 'node:fs';
import { homedir } from 'node:os';
import { join } from 'node:path';
import YAML from 'yaml';
import { cancelTask, createTask, getTask, onTaskEvent } from './task-manager';
import { getProjectInfo } from './projects';
import { loadSettings } from './settings';
import type { Task } from '@/src/types';
17
+
18
+ const PIPELINES_DIR = join(homedir(), '.forge', 'pipelines');
19
+ const WORKFLOWS_DIR = join(homedir(), '.forge', 'flows');
20
+
21
+ // ─── Types ────────────────────────────────────────────────
22
+
23
+ export interface WorkflowNode {
24
+ id: string;
25
+ project: string;
26
+ prompt: string;
27
+ dependsOn: string[];
28
+ outputs: { name: string; extract: 'result' | 'git_diff' }[];
29
+ routes: { condition: string; next: string }[];
30
+ maxIterations: number;
31
+ }
32
+
33
+ export interface Workflow {
34
+ name: string;
35
+ description?: string;
36
+ vars: Record<string, string>;
37
+ input: Record<string, string>; // required input fields
38
+ nodes: Record<string, WorkflowNode>;
39
+ }
40
+
41
+ export type PipelineNodeStatus = 'pending' | 'running' | 'done' | 'failed' | 'skipped';
42
+
43
+ export interface PipelineNodeState {
44
+ status: PipelineNodeStatus;
45
+ taskId?: string;
46
+ outputs: Record<string, string>;
47
+ iterations: number;
48
+ startedAt?: string;
49
+ completedAt?: string;
50
+ error?: string;
51
+ }
52
+
53
+ export interface Pipeline {
54
+ id: string;
55
+ workflowName: string;
56
+ status: 'running' | 'done' | 'failed' | 'cancelled';
57
+ input: Record<string, string>;
58
+ vars: Record<string, string>;
59
+ nodes: Record<string, PipelineNodeState>;
60
+ nodeOrder: string[]; // for UI display
61
+ createdAt: string;
62
+ completedAt?: string;
63
+ }
64
+
65
+ // ─── Workflow Loading ─────────────────────────────────────
66
+
67
+ export function listWorkflows(): Workflow[] {
68
+ if (!existsSync(WORKFLOWS_DIR)) return [];
69
+ return readdirSync(WORKFLOWS_DIR)
70
+ .filter(f => f.endsWith('.yaml') || f.endsWith('.yml'))
71
+ .map(f => {
72
+ try {
73
+ return parseWorkflow(readFileSync(join(WORKFLOWS_DIR, f), 'utf-8'));
74
+ } catch {
75
+ return null;
76
+ }
77
+ })
78
+ .filter(Boolean) as Workflow[];
79
+ }
80
+
81
+ export function getWorkflow(name: string): Workflow | null {
82
+ return listWorkflows().find(w => w.name === name) || null;
83
+ }
84
+
85
+ function parseWorkflow(raw: string): Workflow {
86
+ const parsed = YAML.parse(raw);
87
+ const nodes: Record<string, WorkflowNode> = {};
88
+
89
+ for (const [id, def] of Object.entries(parsed.nodes || {})) {
90
+ const n = def as any;
91
+ nodes[id] = {
92
+ id,
93
+ project: n.project || '',
94
+ prompt: n.prompt || '',
95
+ dependsOn: n.depends_on || n.dependsOn || [],
96
+ outputs: (n.outputs || []).map((o: any) => ({
97
+ name: o.name,
98
+ extract: o.extract || 'result',
99
+ })),
100
+ routes: (n.routes || []).map((r: any) => ({
101
+ condition: r.condition || 'default',
102
+ next: r.next,
103
+ })),
104
+ maxIterations: n.max_iterations || n.maxIterations || 3,
105
+ };
106
+ }
107
+
108
+ return {
109
+ name: parsed.name || 'unnamed',
110
+ description: parsed.description,
111
+ vars: parsed.vars || {},
112
+ input: parsed.input || {},
113
+ nodes,
114
+ };
115
+ }
116
+
117
+ // ─── Pipeline Persistence ─────────────────────────────────
118
+
119
+ function ensureDir() {
120
+ if (!existsSync(PIPELINES_DIR)) mkdirSync(PIPELINES_DIR, { recursive: true });
121
+ }
122
+
123
+ function savePipeline(pipeline: Pipeline) {
124
+ ensureDir();
125
+ writeFileSync(join(PIPELINES_DIR, `${pipeline.id}.json`), JSON.stringify(pipeline, null, 2));
126
+ }
127
+
128
+ export function getPipeline(id: string): Pipeline | null {
129
+ try {
130
+ return JSON.parse(readFileSync(join(PIPELINES_DIR, `${id}.json`), 'utf-8'));
131
+ } catch {
132
+ return null;
133
+ }
134
+ }
135
+
136
+ export function deletePipeline(id: string): boolean {
137
+ const filePath = join(PIPELINES_DIR, `${id}.json`);
138
+ try {
139
+ if (existsSync(filePath)) {
140
+ const { unlinkSync } = require('node:fs');
141
+ unlinkSync(filePath);
142
+ return true;
143
+ }
144
+ } catch {}
145
+ return false;
146
+ }
147
+
148
+ export function listPipelines(): Pipeline[] {
149
+ ensureDir();
150
+ return readdirSync(PIPELINES_DIR)
151
+ .filter(f => f.endsWith('.json'))
152
+ .map(f => {
153
+ try {
154
+ return JSON.parse(readFileSync(join(PIPELINES_DIR, f), 'utf-8')) as Pipeline;
155
+ } catch {
156
+ return null;
157
+ }
158
+ })
159
+ .filter(Boolean) as Pipeline[];
160
+ }
161
+
162
+ // ─── Template Resolution ──────────────────────────────────
163
+
164
+ function resolveTemplate(template: string, ctx: {
165
+ input: Record<string, string>;
166
+ vars: Record<string, string>;
167
+ nodes: Record<string, PipelineNodeState>;
168
+ }): string {
169
+ return template.replace(/\{\{(.*?)\}\}/g, (_, expr) => {
170
+ const path = expr.trim();
171
+
172
+ // {{input.xxx}}
173
+ if (path.startsWith('input.')) return ctx.input[path.slice(6)] || '';
174
+
175
+ // {{vars.xxx}}
176
+ if (path.startsWith('vars.')) return ctx.vars[path.slice(5)] || '';
177
+
178
+ // {{nodes.xxx.outputs.yyy}}
179
+ const nodeMatch = path.match(/^nodes\.(\w+)\.outputs\.(\w+)$/);
180
+ if (nodeMatch) {
181
+ const [, nodeId, outputName] = nodeMatch;
182
+ return ctx.nodes[nodeId]?.outputs[outputName] || '';
183
+ }
184
+
185
+ return `{{${path}}}`;
186
+ });
187
+ }
188
+
189
+ // ─── Pipeline Execution ───────────────────────────────────
190
+
191
+ export function startPipeline(workflowName: string, input: Record<string, string>): Pipeline {
192
+ const workflow = getWorkflow(workflowName);
193
+ if (!workflow) throw new Error(`Workflow not found: ${workflowName}`);
194
+
195
+ const id = randomUUID().slice(0, 8);
196
+ const nodes: Record<string, PipelineNodeState> = {};
197
+ const nodeOrder = topologicalSort(workflow.nodes);
198
+
199
+ for (const nodeId of nodeOrder) {
200
+ nodes[nodeId] = {
201
+ status: 'pending',
202
+ outputs: {},
203
+ iterations: 0,
204
+ };
205
+ }
206
+
207
+ const pipeline: Pipeline = {
208
+ id,
209
+ workflowName,
210
+ status: 'running',
211
+ input,
212
+ vars: { ...workflow.vars },
213
+ nodes,
214
+ nodeOrder,
215
+ createdAt: new Date().toISOString(),
216
+ };
217
+
218
+ savePipeline(pipeline);
219
+
220
+ // Start nodes that have no dependencies
221
+ scheduleReadyNodes(pipeline, workflow);
222
+
223
+ // Listen for task completions
224
+ setupTaskListener(pipeline.id);
225
+
226
+ return pipeline;
227
+ }
228
+
229
+ export function cancelPipeline(id: string): boolean {
230
+ const pipeline = getPipeline(id);
231
+ if (!pipeline || pipeline.status !== 'running') return false;
232
+
233
+ pipeline.status = 'cancelled';
234
+ pipeline.completedAt = new Date().toISOString();
235
+
236
+ // Cancel all running tasks
237
+ for (const [, node] of Object.entries(pipeline.nodes)) {
238
+ if (node.status === 'running' && node.taskId) {
239
+ const { cancelTask } = require('./task-manager');
240
+ cancelTask(node.taskId);
241
+ }
242
+ if (node.status === 'pending') node.status = 'skipped';
243
+ }
244
+
245
+ savePipeline(pipeline);
246
+ return true;
247
+ }
248
+
249
+ // ─── Node Scheduling ──────────────────────────────────────
250
+
251
+ function scheduleReadyNodes(pipeline: Pipeline, workflow: Workflow) {
252
+ const ctx = { input: pipeline.input, vars: pipeline.vars, nodes: pipeline.nodes };
253
+
254
+ for (const nodeId of pipeline.nodeOrder) {
255
+ const nodeState = pipeline.nodes[nodeId];
256
+ if (nodeState.status !== 'pending') continue;
257
+
258
+ const nodeDef = workflow.nodes[nodeId];
259
+ if (!nodeDef) continue;
260
+
261
+ // Check all dependencies are done
262
+ const depsReady = nodeDef.dependsOn.every(dep => {
263
+ const depState = pipeline.nodes[dep];
264
+ return depState && depState.status === 'done';
265
+ });
266
+
267
+ // Check if any dependency failed (skip this node)
268
+ const depsFailed = nodeDef.dependsOn.some(dep => {
269
+ const depState = pipeline.nodes[dep];
270
+ return depState && (depState.status === 'failed' || depState.status === 'skipped');
271
+ });
272
+
273
+ if (depsFailed) {
274
+ nodeState.status = 'skipped';
275
+ savePipeline(pipeline);
276
+ continue;
277
+ }
278
+
279
+ if (!depsReady) continue;
280
+
281
+ // Resolve templates
282
+ const project = resolveTemplate(nodeDef.project, ctx);
283
+ const prompt = resolveTemplate(nodeDef.prompt, ctx);
284
+
285
+ const projectInfo = getProjectInfo(project);
286
+ if (!projectInfo) {
287
+ nodeState.status = 'failed';
288
+ nodeState.error = `Project not found: ${project}`;
289
+ savePipeline(pipeline);
290
+ notifyStep(pipeline, nodeId, 'failed', nodeState.error);
291
+ continue;
292
+ }
293
+
294
+ // Create task
295
+ const task = createTask({
296
+ projectName: projectInfo.name,
297
+ projectPath: projectInfo.path,
298
+ prompt,
299
+ });
300
+
301
+ nodeState.status = 'running';
302
+ nodeState.taskId = task.id;
303
+ nodeState.iterations++;
304
+ nodeState.startedAt = new Date().toISOString();
305
+ savePipeline(pipeline);
306
+
307
+ notifyStep(pipeline, nodeId, 'running');
308
+ }
309
+
310
+ // Check if pipeline is complete
311
+ checkPipelineCompletion(pipeline);
312
+ }
313
+
314
+ function checkPipelineCompletion(pipeline: Pipeline) {
315
+ const states = Object.values(pipeline.nodes);
316
+ const allDone = states.every(s => s.status === 'done' || s.status === 'skipped' || s.status === 'failed');
317
+
318
+ if (allDone && pipeline.status === 'running') {
319
+ const anyFailed = states.some(s => s.status === 'failed');
320
+ pipeline.status = anyFailed ? 'failed' : 'done';
321
+ pipeline.completedAt = new Date().toISOString();
322
+ savePipeline(pipeline);
323
+ notifyPipelineComplete(pipeline);
324
+ }
325
+ }
326
+
327
+ // ─── Task Event Listener ──────────────────────────────────
328
+
329
+ const activeListeners = new Set<string>();
330
+
331
+ function setupTaskListener(pipelineId: string) {
332
+ if (activeListeners.has(pipelineId)) return;
333
+ activeListeners.add(pipelineId);
334
+
335
+ const cleanup = onTaskEvent((taskId, event, data) => {
336
+ if (event !== 'status') return;
337
+ if (data !== 'done' && data !== 'failed') return;
338
+
339
+ const pipeline = getPipeline(pipelineId);
340
+ if (!pipeline || pipeline.status !== 'running') {
341
+ cleanup();
342
+ activeListeners.delete(pipelineId);
343
+ return;
344
+ }
345
+
346
+ // Find the node for this task
347
+ const nodeEntry = Object.entries(pipeline.nodes).find(([, n]) => n.taskId === taskId);
348
+ if (!nodeEntry) return;
349
+
350
+ const [nodeId, nodeState] = nodeEntry;
351
+ const workflow = getWorkflow(pipeline.workflowName);
352
+ if (!workflow) return;
353
+
354
+ const nodeDef = workflow.nodes[nodeId];
355
+ const task = getTask(taskId);
356
+
357
+ if (data === 'done' && task) {
358
+ // Extract outputs
359
+ for (const outputDef of nodeDef.outputs) {
360
+ if (outputDef.extract === 'result') {
361
+ nodeState.outputs[outputDef.name] = task.resultSummary || '';
362
+ } else if (outputDef.extract === 'git_diff') {
363
+ nodeState.outputs[outputDef.name] = task.gitDiff || '';
364
+ }
365
+ }
366
+
367
+ // Check routes for conditional next step
368
+ if (nodeDef.routes.length > 0) {
369
+ const nextNode = evaluateRoutes(nodeDef.routes, nodeState.outputs, pipeline);
370
+ if (nextNode && nextNode !== nodeId) {
371
+ // Route to next node — mark this as done
372
+ nodeState.status = 'done';
373
+ nodeState.completedAt = new Date().toISOString();
374
+ // Reset next node to pending so it gets scheduled
375
+ if (pipeline.nodes[nextNode] && pipeline.nodes[nextNode].status !== 'done') {
376
+ pipeline.nodes[nextNode].status = 'pending';
377
+ }
378
+ } else if (nextNode === nodeId) {
379
+ // Loop back — check iteration limit
380
+ if (nodeState.iterations < nodeDef.maxIterations) {
381
+ nodeState.status = 'pending';
382
+ nodeState.taskId = undefined;
383
+ } else {
384
+ nodeState.status = 'done';
385
+ nodeState.completedAt = new Date().toISOString();
386
+ }
387
+ } else {
388
+ nodeState.status = 'done';
389
+ nodeState.completedAt = new Date().toISOString();
390
+ }
391
+ } else {
392
+ nodeState.status = 'done';
393
+ nodeState.completedAt = new Date().toISOString();
394
+ }
395
+
396
+ savePipeline(pipeline);
397
+ notifyStep(pipeline, nodeId, 'done');
398
+ } else if (data === 'failed') {
399
+ nodeState.status = 'failed';
400
+ nodeState.error = task?.error || 'Task failed';
401
+ nodeState.completedAt = new Date().toISOString();
402
+ savePipeline(pipeline);
403
+ notifyStep(pipeline, nodeId, 'failed', nodeState.error);
404
+ }
405
+
406
+ // Schedule next ready nodes
407
+ scheduleReadyNodes(pipeline, workflow);
408
+ });
409
+ }
410
+
411
+ function evaluateRoutes(
412
+ routes: { condition: string; next: string }[],
413
+ outputs: Record<string, string>,
414
+ pipeline: Pipeline
415
+ ): string | null {
416
+ for (const route of routes) {
417
+ if (route.condition === 'default') return route.next;
418
+
419
+ // Simple "contains" check: {{outputs.xxx contains 'YYY'}}
420
+ const containsMatch = route.condition.match(/\{\{outputs\.(\w+)\s+contains\s+'([^']+)'\}\}/);
421
+ if (containsMatch) {
422
+ const [, outputName, keyword] = containsMatch;
423
+ if (outputs[outputName]?.includes(keyword)) return route.next;
424
+ continue;
425
+ }
426
+
427
+ // Default: treat as truthy check
428
+ return route.next;
429
+ }
430
+ return null;
431
+ }
432
+
433
+ // ─── Topological Sort ─────────────────────────────────────
434
+
435
+ function topologicalSort(nodes: Record<string, WorkflowNode>): string[] {
436
+ const sorted: string[] = [];
437
+ const visited = new Set<string>();
438
+ const visiting = new Set<string>();
439
+
440
+ function visit(id: string) {
441
+ if (visited.has(id)) return;
442
+ if (visiting.has(id)) return; // cycle — skip
443
+ visiting.add(id);
444
+
445
+ const node = nodes[id];
446
+ if (node) {
447
+ for (const dep of node.dependsOn) {
448
+ visit(dep);
449
+ }
450
+ // Also add route targets
451
+ for (const route of node.routes) {
452
+ if (nodes[route.next] && !node.dependsOn.includes(route.next)) {
453
+ // Don't visit route targets in topo sort to avoid cycles
454
+ }
455
+ }
456
+ }
457
+
458
+ visiting.delete(id);
459
+ visited.add(id);
460
+ sorted.push(id);
461
+ }
462
+
463
+ for (const id of Object.keys(nodes)) {
464
+ visit(id);
465
+ }
466
+
467
+ return sorted;
468
+ }
469
+
470
+ // ─── Notifications ────────────────────────────────────────
471
+
472
+ async function notifyStep(pipeline: Pipeline, nodeId: string, status: string, error?: string) {
473
+ const settings = loadSettings();
474
+ if (!settings.telegramBotToken || !settings.telegramChatId) return;
475
+
476
+ const icon = status === 'done' ? '✅' : status === 'failed' ? '❌' : status === 'running' ? '🔄' : '⏳';
477
+ const msg = `${icon} Pipeline ${pipeline.id}/${nodeId}: ${status}${error ? `\n${error}` : ''}`;
478
+
479
+ try {
480
+ await fetch(`https://api.telegram.org/bot${settings.telegramBotToken}/sendMessage`, {
481
+ method: 'POST',
482
+ headers: { 'Content-Type': 'application/json' },
483
+ body: JSON.stringify({
484
+ chat_id: settings.telegramChatId.split(',')[0].trim(),
485
+ text: msg,
486
+ disable_web_page_preview: true,
487
+ }),
488
+ });
489
+ } catch {}
490
+ }
491
+
492
+ async function notifyPipelineComplete(pipeline: Pipeline) {
493
+ const settings = loadSettings();
494
+ if (!settings.telegramBotToken || !settings.telegramChatId) return;
495
+
496
+ const icon = pipeline.status === 'done' ? '🎉' : '💥';
497
+ const nodes = Object.entries(pipeline.nodes)
498
+ .map(([id, n]) => ` ${n.status === 'done' ? '✅' : n.status === 'failed' ? '❌' : '⏭'} ${id}`)
499
+ .join('\n');
500
+
501
+ const msg = `${icon} Pipeline ${pipeline.id} (${pipeline.workflowName}) ${pipeline.status}\n\n${nodes}`;
502
+
503
+ try {
504
+ await fetch(`https://api.telegram.org/bot${settings.telegramBotToken}/sendMessage`, {
505
+ method: 'POST',
506
+ headers: { 'Content-Type': 'application/json' },
507
+ body: JSON.stringify({
508
+ chat_id: settings.telegramChatId.split(',')[0].trim(),
509
+ text: msg,
510
+ disable_web_page_preview: true,
511
+ }),
512
+ });
513
+ } catch {}
514
+ }
@@ -12,8 +12,10 @@ import { loadSettings } from './settings';
12
12
  import { notifyTaskComplete, notifyTaskFailed } from './notify';
13
13
  import type { Task, TaskLogEntry, TaskStatus, TaskMode, WatchConfig } from '@/src/types';
14
14
 
15
- let runner: ReturnType<typeof setInterval> | null = null;
16
- let currentTaskId: string | null = null;
15
+ const runnerKey = Symbol.for('mw-task-runner');
16
+ const gRunner = globalThis as any;
17
+ if (!gRunner[runnerKey]) gRunner[runnerKey] = { runner: null, currentTaskId: null };
18
+ const runnerState: { runner: ReturnType<typeof setInterval> | null; currentTaskId: string | null } = gRunner[runnerKey];
17
19
 
18
20
  // Per-project concurrency: track which projects have a running prompt task
19
21
  const runningProjects = new Set<string>();
@@ -133,7 +135,7 @@ export function deleteTask(id: string): boolean {
133
135
  return true;
134
136
  }
135
137
 
136
- export function updateTask(id: string, updates: { prompt?: string; projectName?: string; projectPath?: string; priority?: number; restart?: boolean }): Task | null {
138
+ export function updateTask(id: string, updates: { prompt?: string; projectName?: string; projectPath?: string; priority?: number; scheduledAt?: string; restart?: boolean }): Task | null {
137
139
  const task = getTask(id);
138
140
  if (!task) return null;
139
141
 
@@ -146,6 +148,7 @@ export function updateTask(id: string, updates: { prompt?: string; projectName?:
146
148
  if (updates.projectName !== undefined) { fields.push('project_name = ?'); values.push(updates.projectName); }
147
149
  if (updates.projectPath !== undefined) { fields.push('project_path = ?'); values.push(updates.projectPath); }
148
150
  if (updates.priority !== undefined) { fields.push('priority = ?'); values.push(updates.priority); }
151
+ if (updates.scheduledAt !== undefined) { fields.push('scheduled_at = ?'); values.push(updates.scheduledAt || null); }
149
152
 
150
153
  // Reset to queued so it runs again
151
154
  if (updates.restart) {
@@ -179,16 +182,16 @@ export function retryTask(id: string): Task | null {
179
182
  // ─── Background Runner ───────────────────────────────────────
180
183
 
181
184
  export function ensureRunnerStarted() {
182
- if (runner) return;
183
- runner = setInterval(processNextTask, 3000);
185
+ if (runnerState.runner) return;
186
+ runnerState.runner = setInterval(processNextTask, 3000);
184
187
  // Also try immediately
185
188
  processNextTask();
186
189
  }
187
190
 
188
191
  export function stopRunner() {
189
- if (runner) {
190
- clearInterval(runner);
191
- runner = null;
192
+ if (runnerState.runner) {
193
+ clearInterval(runnerState.runner);
194
+ runnerState.runner = null;
192
195
  }
193
196
  }
194
197
 
@@ -196,7 +199,7 @@ async function processNextTask() {
196
199
  // Find all queued tasks ready to run
197
200
  const queued = db().prepare(`
198
201
  SELECT * FROM tasks WHERE status = 'queued'
199
- AND (scheduled_at IS NULL OR scheduled_at <= datetime('now'))
202
+ AND (scheduled_at IS NULL OR replace(replace(scheduled_at, 'T', ' '), 'Z', '') <= datetime('now'))
200
203
  ORDER BY priority DESC, created_at ASC
201
204
  `).all() as any[];
202
205
 
@@ -214,7 +217,7 @@ async function processNextTask() {
214
217
 
215
218
  // Run this task
216
219
  runningProjects.add(task.projectName);
217
- currentTaskId = task.id;
220
+ runnerState.currentTaskId = task.id;
218
221
 
219
222
  // Execute async — don't await so we can process tasks for other projects in parallel
220
223
  executeTask(task)
@@ -224,7 +227,7 @@ async function processNextTask() {
224
227
  })
225
228
  .finally(() => {
226
229
  runningProjects.delete(task.projectName);
227
- if (currentTaskId === task.id) currentTaskId = null;
230
+ if (runnerState.currentTaskId === task.id) runnerState.currentTaskId = null;
228
231
  });
229
232
  }
230
233
  }
@@ -234,7 +237,7 @@ function executeTask(task: Task): Promise<void> {
234
237
  const settings = loadSettings();
235
238
  const claudePath = settings.claudePath || process.env.CLAUDE_PATH || 'claude';
236
239
 
237
- const args = ['-p', '--verbose', '--output-format', 'stream-json'];
240
+ const args = ['-p', '--verbose', '--output-format', 'stream-json', '--dangerously-skip-permissions'];
238
241
 
239
242
  // Resume specific session to continue the conversation
240
243
  if (task.conversationId) {
@@ -352,12 +355,14 @@ function executeTask(task: Task): Promise<void> {
352
355
  emit(task.id, 'status', 'done');
353
356
  const doneTask = getTask(task.id);
354
357
  if (doneTask) notifyTaskComplete(doneTask).catch(() => {});
358
+ notifyTerminalSession(task, 'done', sessionId);
355
359
  resolve();
356
360
  } else {
357
361
  const errMsg = `Process exited with code ${code}`;
358
362
  updateTaskStatus(task.id, 'failed', errMsg);
359
363
  const failedTask = getTask(task.id);
360
364
  if (failedTask) notifyTaskFailed(failedTask).catch(() => {});
365
+ notifyTerminalSession(task, 'failed', sessionId);
361
366
  reject(new Error(errMsg));
362
367
  }
363
368
  });
@@ -369,6 +374,59 @@ function executeTask(task: Task): Promise<void> {
369
374
  });
370
375
  }
371
376
 
377
+ // ─── Terminal notification ────────────────────────────────────
378
+
379
+ /**
380
+ * Notify tmux terminal sessions in the same project directory that a task completed.
381
+ * Sends a visible bell character so the user knows to resume.
382
+ */
383
+ function notifyTerminalSession(task: Task, status: 'done' | 'failed', sessionId?: string) {
384
+ try {
385
+ const out = execSync(
386
+ `tmux list-sessions -F "#{session_name}" 2>/dev/null`,
387
+ { encoding: 'utf-8', timeout: 3000 }
388
+ ).trim();
389
+ if (!out) return;
390
+
391
+ for (const name of out.split('\n')) {
392
+ if (!name.startsWith('mw-')) continue;
393
+ try {
394
+ const cwd = execSync(
395
+ `tmux display-message -p -t ${name} '#{pane_current_path}'`,
396
+ { encoding: 'utf-8', timeout: 2000 }
397
+ ).trim();
398
+
399
+ // Match: same dir, parent dir, or child dir
400
+ const match = cwd && (
401
+ cwd === task.projectPath ||
402
+ cwd.startsWith(task.projectPath + '/') ||
403
+ task.projectPath.startsWith(cwd + '/')
404
+ );
405
+ if (!match) continue;
406
+
407
+ const paneCmd = execSync(
408
+ `tmux display-message -p -t ${name} '#{pane_current_command}'`,
409
+ { encoding: 'utf-8', timeout: 2000 }
410
+ ).trim();
411
+
412
+ if (status === 'done') {
413
+ const summary = task.prompt.slice(0, 80).replace(/"/g, "'");
414
+ const msg = `A background task just completed. Task: "${summary}". Please check git diff and continue.`;
415
+
416
+ // If a process is running (claude/node), send as input
417
+ if (paneCmd !== 'zsh' && paneCmd !== 'bash' && paneCmd !== 'fish') {
418
+ execSync(`tmux send-keys -t ${name} -- "${msg.replace(/"/g, '\\"')}" Enter`, { timeout: 2000 });
419
+ } else {
420
+ execSync(`tmux display-message -t ${name} "✅ Task ${task.id} done — changes ready"`, { timeout: 2000 });
421
+ }
422
+ } else {
423
+ execSync(`tmux display-message -t ${name} "❌ Task ${task.id} failed"`, { timeout: 2000 });
424
+ }
425
+ } catch {}
426
+ }
427
+ } catch {}
428
+ }
429
+
372
430
  // ─── Helpers ─────────────────────────────────────────────────
373
431
 
374
432
  /**