@aion0/forge 0.2.2 → 0.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +166 -175
- package/app/api/pipelines/[id]/route.ts +28 -0
- package/app/api/pipelines/route.ts +52 -0
- package/components/Dashboard.tsx +19 -1
- package/components/DocsViewer.tsx +10 -1
- package/components/PipelineEditor.tsx +399 -0
- package/components/PipelineView.tsx +435 -0
- package/lib/pipeline.ts +514 -0
- package/package.json +2 -1
package/lib/pipeline.ts
ADDED
|
@@ -0,0 +1,514 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Pipeline Engine — DAG-based workflow orchestration on top of the Task system.
|
|
3
|
+
*
|
|
4
|
+
* Workflow YAML → Pipeline instance → Nodes executed as Tasks
|
|
5
|
+
* Supports: dependencies, output passing, conditional routing, parallel execution, notifications.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import { randomUUID } from 'node:crypto';
import { existsSync, mkdirSync, readdirSync, readFileSync, unlinkSync, writeFileSync } from 'node:fs';
import { homedir } from 'node:os';
import { join } from 'node:path';
import YAML from 'yaml';
import { getProjectInfo } from './projects';
import { loadSettings } from './settings';
import { cancelTask, createTask, getTask, onTaskEvent } from './task-manager';
import type { Task } from '@/src/types';
|
|
17
|
+
|
|
18
|
+
// On-disk locations under the user's home directory:
// persisted pipeline run state (one JSON file per run) and workflow YAML definitions.
const PIPELINES_DIR = join(homedir(), '.forge', 'pipelines');
const WORKFLOWS_DIR = join(homedir(), '.forge', 'flows');
|
|
20
|
+
|
|
21
|
+
// ─── Types ────────────────────────────────────────────────
|
|
22
|
+
|
|
23
|
+
/** A single step definition parsed from a workflow YAML file. */
export interface WorkflowNode {
  id: string;       // node key from the YAML `nodes:` map
  project: string;  // target project name; may contain {{…}} template placeholders
  prompt: string;   // task prompt; may contain {{…}} template placeholders
  dependsOn: string[]; // node ids that must reach 'done' before this node is scheduled
  // Values captured from the finished task: either its result summary or its git diff.
  outputs: { name: string; extract: 'result' | 'git_diff' }[];
  // Conditional transitions evaluated after the node's task completes.
  routes: { condition: string; next: string }[];
  maxIterations: number; // cap on re-runs when a route loops back to this same node
}
|
|
32
|
+
|
|
33
|
+
/** A parsed workflow definition: a named DAG of nodes plus default variables. */
export interface Workflow {
  name: string;
  description?: string;
  vars: Record<string, string>;  // workflow-level variables copied into each run
  input: Record<string, string>; // required input fields
  nodes: Record<string, WorkflowNode>;
}
|
|
40
|
+
|
|
41
|
+
/** Lifecycle of a single node within a running pipeline. */
export type PipelineNodeStatus = 'pending' | 'running' | 'done' | 'failed' | 'skipped';

/** Runtime state persisted per node of a pipeline instance. */
export interface PipelineNodeState {
  status: PipelineNodeStatus;
  taskId?: string;                 // id of the Task currently/last executing this node
  outputs: Record<string, string>; // extracted outputs keyed by output name
  iterations: number;              // number of times this node has been started
  startedAt?: string;              // ISO timestamp
  completedAt?: string;            // ISO timestamp
  error?: string;                  // failure reason when status === 'failed'
}
|
|
52
|
+
|
|
53
|
+
/** A persisted pipeline run: one instance of a workflow plus per-node state. */
export interface Pipeline {
  id: string; // short 8-character id (prefix of a UUID)
  workflowName: string;
  status: 'running' | 'done' | 'failed' | 'cancelled';
  input: Record<string, string>;
  vars: Record<string, string>;
  nodes: Record<string, PipelineNodeState>;
  nodeOrder: string[]; // for UI display
  createdAt: string;   // ISO timestamp
  completedAt?: string; // ISO timestamp, set on any terminal status
}
|
|
64
|
+
|
|
65
|
+
// ─── Workflow Loading ─────────────────────────────────────
|
|
66
|
+
|
|
67
|
+
export function listWorkflows(): Workflow[] {
|
|
68
|
+
if (!existsSync(WORKFLOWS_DIR)) return [];
|
|
69
|
+
return readdirSync(WORKFLOWS_DIR)
|
|
70
|
+
.filter(f => f.endsWith('.yaml') || f.endsWith('.yml'))
|
|
71
|
+
.map(f => {
|
|
72
|
+
try {
|
|
73
|
+
return parseWorkflow(readFileSync(join(WORKFLOWS_DIR, f), 'utf-8'));
|
|
74
|
+
} catch {
|
|
75
|
+
return null;
|
|
76
|
+
}
|
|
77
|
+
})
|
|
78
|
+
.filter(Boolean) as Workflow[];
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
export function getWorkflow(name: string): Workflow | null {
|
|
82
|
+
return listWorkflows().find(w => w.name === name) || null;
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
function parseWorkflow(raw: string): Workflow {
|
|
86
|
+
const parsed = YAML.parse(raw);
|
|
87
|
+
const nodes: Record<string, WorkflowNode> = {};
|
|
88
|
+
|
|
89
|
+
for (const [id, def] of Object.entries(parsed.nodes || {})) {
|
|
90
|
+
const n = def as any;
|
|
91
|
+
nodes[id] = {
|
|
92
|
+
id,
|
|
93
|
+
project: n.project || '',
|
|
94
|
+
prompt: n.prompt || '',
|
|
95
|
+
dependsOn: n.depends_on || n.dependsOn || [],
|
|
96
|
+
outputs: (n.outputs || []).map((o: any) => ({
|
|
97
|
+
name: o.name,
|
|
98
|
+
extract: o.extract || 'result',
|
|
99
|
+
})),
|
|
100
|
+
routes: (n.routes || []).map((r: any) => ({
|
|
101
|
+
condition: r.condition || 'default',
|
|
102
|
+
next: r.next,
|
|
103
|
+
})),
|
|
104
|
+
maxIterations: n.max_iterations || n.maxIterations || 3,
|
|
105
|
+
};
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
return {
|
|
109
|
+
name: parsed.name || 'unnamed',
|
|
110
|
+
description: parsed.description,
|
|
111
|
+
vars: parsed.vars || {},
|
|
112
|
+
input: parsed.input || {},
|
|
113
|
+
nodes,
|
|
114
|
+
};
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
// ─── Pipeline Persistence ─────────────────────────────────
|
|
118
|
+
|
|
119
|
+
function ensureDir() {
|
|
120
|
+
if (!existsSync(PIPELINES_DIR)) mkdirSync(PIPELINES_DIR, { recursive: true });
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
function savePipeline(pipeline: Pipeline) {
|
|
124
|
+
ensureDir();
|
|
125
|
+
writeFileSync(join(PIPELINES_DIR, `${pipeline.id}.json`), JSON.stringify(pipeline, null, 2));
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
export function getPipeline(id: string): Pipeline | null {
|
|
129
|
+
try {
|
|
130
|
+
return JSON.parse(readFileSync(join(PIPELINES_DIR, `${id}.json`), 'utf-8'));
|
|
131
|
+
} catch {
|
|
132
|
+
return null;
|
|
133
|
+
}
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
export function deletePipeline(id: string): boolean {
|
|
137
|
+
const filePath = join(PIPELINES_DIR, `${id}.json`);
|
|
138
|
+
try {
|
|
139
|
+
if (existsSync(filePath)) {
|
|
140
|
+
const { unlinkSync } = require('node:fs');
|
|
141
|
+
unlinkSync(filePath);
|
|
142
|
+
return true;
|
|
143
|
+
}
|
|
144
|
+
} catch {}
|
|
145
|
+
return false;
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
export function listPipelines(): Pipeline[] {
|
|
149
|
+
ensureDir();
|
|
150
|
+
return readdirSync(PIPELINES_DIR)
|
|
151
|
+
.filter(f => f.endsWith('.json'))
|
|
152
|
+
.map(f => {
|
|
153
|
+
try {
|
|
154
|
+
return JSON.parse(readFileSync(join(PIPELINES_DIR, f), 'utf-8')) as Pipeline;
|
|
155
|
+
} catch {
|
|
156
|
+
return null;
|
|
157
|
+
}
|
|
158
|
+
})
|
|
159
|
+
.filter(Boolean) as Pipeline[];
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
// ─── Template Resolution ──────────────────────────────────
|
|
163
|
+
|
|
164
|
+
function resolveTemplate(template: string, ctx: {
|
|
165
|
+
input: Record<string, string>;
|
|
166
|
+
vars: Record<string, string>;
|
|
167
|
+
nodes: Record<string, PipelineNodeState>;
|
|
168
|
+
}): string {
|
|
169
|
+
return template.replace(/\{\{(.*?)\}\}/g, (_, expr) => {
|
|
170
|
+
const path = expr.trim();
|
|
171
|
+
|
|
172
|
+
// {{input.xxx}}
|
|
173
|
+
if (path.startsWith('input.')) return ctx.input[path.slice(6)] || '';
|
|
174
|
+
|
|
175
|
+
// {{vars.xxx}}
|
|
176
|
+
if (path.startsWith('vars.')) return ctx.vars[path.slice(5)] || '';
|
|
177
|
+
|
|
178
|
+
// {{nodes.xxx.outputs.yyy}}
|
|
179
|
+
const nodeMatch = path.match(/^nodes\.(\w+)\.outputs\.(\w+)$/);
|
|
180
|
+
if (nodeMatch) {
|
|
181
|
+
const [, nodeId, outputName] = nodeMatch;
|
|
182
|
+
return ctx.nodes[nodeId]?.outputs[outputName] || '';
|
|
183
|
+
}
|
|
184
|
+
|
|
185
|
+
return `{{${path}}}`;
|
|
186
|
+
});
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
// ─── Pipeline Execution ───────────────────────────────────
|
|
190
|
+
|
|
191
|
+
export function startPipeline(workflowName: string, input: Record<string, string>): Pipeline {
|
|
192
|
+
const workflow = getWorkflow(workflowName);
|
|
193
|
+
if (!workflow) throw new Error(`Workflow not found: ${workflowName}`);
|
|
194
|
+
|
|
195
|
+
const id = randomUUID().slice(0, 8);
|
|
196
|
+
const nodes: Record<string, PipelineNodeState> = {};
|
|
197
|
+
const nodeOrder = topologicalSort(workflow.nodes);
|
|
198
|
+
|
|
199
|
+
for (const nodeId of nodeOrder) {
|
|
200
|
+
nodes[nodeId] = {
|
|
201
|
+
status: 'pending',
|
|
202
|
+
outputs: {},
|
|
203
|
+
iterations: 0,
|
|
204
|
+
};
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
const pipeline: Pipeline = {
|
|
208
|
+
id,
|
|
209
|
+
workflowName,
|
|
210
|
+
status: 'running',
|
|
211
|
+
input,
|
|
212
|
+
vars: { ...workflow.vars },
|
|
213
|
+
nodes,
|
|
214
|
+
nodeOrder,
|
|
215
|
+
createdAt: new Date().toISOString(),
|
|
216
|
+
};
|
|
217
|
+
|
|
218
|
+
savePipeline(pipeline);
|
|
219
|
+
|
|
220
|
+
// Start nodes that have no dependencies
|
|
221
|
+
scheduleReadyNodes(pipeline, workflow);
|
|
222
|
+
|
|
223
|
+
// Listen for task completions
|
|
224
|
+
setupTaskListener(pipeline.id);
|
|
225
|
+
|
|
226
|
+
return pipeline;
|
|
227
|
+
}
|
|
228
|
+
|
|
229
|
+
export function cancelPipeline(id: string): boolean {
|
|
230
|
+
const pipeline = getPipeline(id);
|
|
231
|
+
if (!pipeline || pipeline.status !== 'running') return false;
|
|
232
|
+
|
|
233
|
+
pipeline.status = 'cancelled';
|
|
234
|
+
pipeline.completedAt = new Date().toISOString();
|
|
235
|
+
|
|
236
|
+
// Cancel all running tasks
|
|
237
|
+
for (const [, node] of Object.entries(pipeline.nodes)) {
|
|
238
|
+
if (node.status === 'running' && node.taskId) {
|
|
239
|
+
const { cancelTask } = require('./task-manager');
|
|
240
|
+
cancelTask(node.taskId);
|
|
241
|
+
}
|
|
242
|
+
if (node.status === 'pending') node.status = 'skipped';
|
|
243
|
+
}
|
|
244
|
+
|
|
245
|
+
savePipeline(pipeline);
|
|
246
|
+
return true;
|
|
247
|
+
}
|
|
248
|
+
|
|
249
|
+
// ─── Node Scheduling ──────────────────────────────────────
|
|
250
|
+
|
|
251
|
+
/**
 * Scans the pipeline in topological order and starts every 'pending' node whose
 * dependencies have all completed. Nodes whose dependencies failed or were
 * skipped are themselves marked 'skipped'. After the scan, checks whether the
 * whole pipeline has reached a terminal state.
 *
 * Side effects: creates Tasks, mutates and persists `pipeline`, fires
 * per-step notifications. Statement order matters — state is saved after each
 * transition so a crash leaves a consistent file on disk.
 */
function scheduleReadyNodes(pipeline: Pipeline, workflow: Workflow) {
  // Template context shared by all nodes scheduled in this pass.
  const ctx = { input: pipeline.input, vars: pipeline.vars, nodes: pipeline.nodes };

  for (const nodeId of pipeline.nodeOrder) {
    const nodeState = pipeline.nodes[nodeId];
    if (nodeState.status !== 'pending') continue;

    const nodeDef = workflow.nodes[nodeId];
    if (!nodeDef) continue; // state for a node the workflow no longer defines

    // Check all dependencies are done
    const depsReady = nodeDef.dependsOn.every(dep => {
      const depState = pipeline.nodes[dep];
      return depState && depState.status === 'done';
    });

    // Check if any dependency failed (skip this node)
    const depsFailed = nodeDef.dependsOn.some(dep => {
      const depState = pipeline.nodes[dep];
      return depState && (depState.status === 'failed' || depState.status === 'skipped');
    });

    if (depsFailed) {
      // Failure/skip propagates downstream as 'skipped'.
      nodeState.status = 'skipped';
      savePipeline(pipeline);
      continue;
    }

    if (!depsReady) continue; // wait for a later completion event

    // Resolve {{…}} templates against inputs, vars, and upstream outputs.
    const project = resolveTemplate(nodeDef.project, ctx);
    const prompt = resolveTemplate(nodeDef.prompt, ctx);

    const projectInfo = getProjectInfo(project);
    if (!projectInfo) {
      nodeState.status = 'failed';
      nodeState.error = `Project not found: ${project}`;
      savePipeline(pipeline);
      notifyStep(pipeline, nodeId, 'failed', nodeState.error);
      continue;
    }

    // Create task — the node executes as a regular Task in the target project.
    const task = createTask({
      projectName: projectInfo.name,
      projectPath: projectInfo.path,
      prompt,
    });

    nodeState.status = 'running';
    nodeState.taskId = task.id;
    nodeState.iterations++; // counted against maxIterations on loop-back routes
    nodeState.startedAt = new Date().toISOString();
    savePipeline(pipeline);

    notifyStep(pipeline, nodeId, 'running');
  }

  // Check if pipeline is complete
  checkPipelineCompletion(pipeline);
}
|
|
313
|
+
|
|
314
|
+
function checkPipelineCompletion(pipeline: Pipeline) {
|
|
315
|
+
const states = Object.values(pipeline.nodes);
|
|
316
|
+
const allDone = states.every(s => s.status === 'done' || s.status === 'skipped' || s.status === 'failed');
|
|
317
|
+
|
|
318
|
+
if (allDone && pipeline.status === 'running') {
|
|
319
|
+
const anyFailed = states.some(s => s.status === 'failed');
|
|
320
|
+
pipeline.status = anyFailed ? 'failed' : 'done';
|
|
321
|
+
pipeline.completedAt = new Date().toISOString();
|
|
322
|
+
savePipeline(pipeline);
|
|
323
|
+
notifyPipelineComplete(pipeline);
|
|
324
|
+
}
|
|
325
|
+
}
|
|
326
|
+
|
|
327
|
+
// ─── Task Event Listener ──────────────────────────────────
|
|
328
|
+
|
|
329
|
+
// Pipeline ids that already have a task-event listener attached, so repeated
// startPipeline calls cannot register duplicate listeners.
const activeListeners = new Set<string>();

/**
 * Subscribes to task status events and advances the pipeline when one of its
 * tasks reaches 'done' or 'failed': extracts declared outputs, evaluates
 * conditional routes (including loop-back with an iteration cap), persists
 * state, notifies, and schedules the next ready nodes.
 *
 * The listener self-detaches once the pipeline is no longer 'running'.
 * NOTE(review): the listener re-reads the pipeline from disk on every event, so
 * concurrent events presumably serialize through the saved file — confirm the
 * task-manager emits events synchronously enough for this to be safe.
 */
function setupTaskListener(pipelineId: string) {
  if (activeListeners.has(pipelineId)) return;
  activeListeners.add(pipelineId);

  const cleanup = onTaskEvent((taskId, event, data) => {
    // Only terminal status transitions drive the pipeline forward.
    if (event !== 'status') return;
    if (data !== 'done' && data !== 'failed') return;

    const pipeline = getPipeline(pipelineId);
    if (!pipeline || pipeline.status !== 'running') {
      // Pipeline finished, was cancelled, or was deleted — detach.
      cleanup();
      activeListeners.delete(pipelineId);
      return;
    }

    // Find the node for this task
    const nodeEntry = Object.entries(pipeline.nodes).find(([, n]) => n.taskId === taskId);
    if (!nodeEntry) return; // event belongs to an unrelated task

    const [nodeId, nodeState] = nodeEntry;
    const workflow = getWorkflow(pipeline.workflowName);
    if (!workflow) return; // workflow YAML was removed mid-run

    const nodeDef = workflow.nodes[nodeId];
    const task = getTask(taskId);

    if (data === 'done' && task) {
      // Extract outputs declared in the workflow from the completed task.
      for (const outputDef of nodeDef.outputs) {
        if (outputDef.extract === 'result') {
          nodeState.outputs[outputDef.name] = task.resultSummary || '';
        } else if (outputDef.extract === 'git_diff') {
          nodeState.outputs[outputDef.name] = task.gitDiff || '';
        }
      }

      // Check routes for conditional next step
      if (nodeDef.routes.length > 0) {
        const nextNode = evaluateRoutes(nodeDef.routes, nodeState.outputs, pipeline);
        if (nextNode && nextNode !== nodeId) {
          // Route to next node — mark this as done
          nodeState.status = 'done';
          nodeState.completedAt = new Date().toISOString();
          // Reset next node to pending so it gets scheduled
          if (pipeline.nodes[nextNode] && pipeline.nodes[nextNode].status !== 'done') {
            pipeline.nodes[nextNode].status = 'pending';
          }
        } else if (nextNode === nodeId) {
          // Loop back — check iteration limit
          if (nodeState.iterations < nodeDef.maxIterations) {
            // Re-run: clearing taskId lets the scheduler start a fresh task.
            nodeState.status = 'pending';
            nodeState.taskId = undefined;
          } else {
            // Iteration budget exhausted — accept the current result.
            nodeState.status = 'done';
            nodeState.completedAt = new Date().toISOString();
          }
        } else {
          // No route matched — node simply completes.
          nodeState.status = 'done';
          nodeState.completedAt = new Date().toISOString();
        }
      } else {
        nodeState.status = 'done';
        nodeState.completedAt = new Date().toISOString();
      }

      savePipeline(pipeline);
      notifyStep(pipeline, nodeId, 'done');
    } else if (data === 'failed') {
      nodeState.status = 'failed';
      nodeState.error = task?.error || 'Task failed';
      nodeState.completedAt = new Date().toISOString();
      savePipeline(pipeline);
      notifyStep(pipeline, nodeId, 'failed', nodeState.error);
    }

    // Schedule next ready nodes
    scheduleReadyNodes(pipeline, workflow);
  });
}
|
|
410
|
+
|
|
411
|
+
function evaluateRoutes(
|
|
412
|
+
routes: { condition: string; next: string }[],
|
|
413
|
+
outputs: Record<string, string>,
|
|
414
|
+
pipeline: Pipeline
|
|
415
|
+
): string | null {
|
|
416
|
+
for (const route of routes) {
|
|
417
|
+
if (route.condition === 'default') return route.next;
|
|
418
|
+
|
|
419
|
+
// Simple "contains" check: {{outputs.xxx contains 'YYY'}}
|
|
420
|
+
const containsMatch = route.condition.match(/\{\{outputs\.(\w+)\s+contains\s+'([^']+)'\}\}/);
|
|
421
|
+
if (containsMatch) {
|
|
422
|
+
const [, outputName, keyword] = containsMatch;
|
|
423
|
+
if (outputs[outputName]?.includes(keyword)) return route.next;
|
|
424
|
+
continue;
|
|
425
|
+
}
|
|
426
|
+
|
|
427
|
+
// Default: treat as truthy check
|
|
428
|
+
return route.next;
|
|
429
|
+
}
|
|
430
|
+
return null;
|
|
431
|
+
}
|
|
432
|
+
|
|
433
|
+
// ─── Topological Sort ─────────────────────────────────────
|
|
434
|
+
|
|
435
|
+
function topologicalSort(nodes: Record<string, WorkflowNode>): string[] {
|
|
436
|
+
const sorted: string[] = [];
|
|
437
|
+
const visited = new Set<string>();
|
|
438
|
+
const visiting = new Set<string>();
|
|
439
|
+
|
|
440
|
+
function visit(id: string) {
|
|
441
|
+
if (visited.has(id)) return;
|
|
442
|
+
if (visiting.has(id)) return; // cycle — skip
|
|
443
|
+
visiting.add(id);
|
|
444
|
+
|
|
445
|
+
const node = nodes[id];
|
|
446
|
+
if (node) {
|
|
447
|
+
for (const dep of node.dependsOn) {
|
|
448
|
+
visit(dep);
|
|
449
|
+
}
|
|
450
|
+
// Also add route targets
|
|
451
|
+
for (const route of node.routes) {
|
|
452
|
+
if (nodes[route.next] && !node.dependsOn.includes(route.next)) {
|
|
453
|
+
// Don't visit route targets in topo sort to avoid cycles
|
|
454
|
+
}
|
|
455
|
+
}
|
|
456
|
+
}
|
|
457
|
+
|
|
458
|
+
visiting.delete(id);
|
|
459
|
+
visited.add(id);
|
|
460
|
+
sorted.push(id);
|
|
461
|
+
}
|
|
462
|
+
|
|
463
|
+
for (const id of Object.keys(nodes)) {
|
|
464
|
+
visit(id);
|
|
465
|
+
}
|
|
466
|
+
|
|
467
|
+
return sorted;
|
|
468
|
+
}
|
|
469
|
+
|
|
470
|
+
// ─── Notifications ────────────────────────────────────────
|
|
471
|
+
|
|
472
|
+
async function notifyStep(pipeline: Pipeline, nodeId: string, status: string, error?: string) {
|
|
473
|
+
const settings = loadSettings();
|
|
474
|
+
if (!settings.telegramBotToken || !settings.telegramChatId) return;
|
|
475
|
+
|
|
476
|
+
const icon = status === 'done' ? '✅' : status === 'failed' ? '❌' : status === 'running' ? '🔄' : '⏳';
|
|
477
|
+
const msg = `${icon} Pipeline ${pipeline.id}/${nodeId}: ${status}${error ? `\n${error}` : ''}`;
|
|
478
|
+
|
|
479
|
+
try {
|
|
480
|
+
await fetch(`https://api.telegram.org/bot${settings.telegramBotToken}/sendMessage`, {
|
|
481
|
+
method: 'POST',
|
|
482
|
+
headers: { 'Content-Type': 'application/json' },
|
|
483
|
+
body: JSON.stringify({
|
|
484
|
+
chat_id: settings.telegramChatId.split(',')[0].trim(),
|
|
485
|
+
text: msg,
|
|
486
|
+
disable_web_page_preview: true,
|
|
487
|
+
}),
|
|
488
|
+
});
|
|
489
|
+
} catch {}
|
|
490
|
+
}
|
|
491
|
+
|
|
492
|
+
async function notifyPipelineComplete(pipeline: Pipeline) {
|
|
493
|
+
const settings = loadSettings();
|
|
494
|
+
if (!settings.telegramBotToken || !settings.telegramChatId) return;
|
|
495
|
+
|
|
496
|
+
const icon = pipeline.status === 'done' ? '🎉' : '💥';
|
|
497
|
+
const nodes = Object.entries(pipeline.nodes)
|
|
498
|
+
.map(([id, n]) => ` ${n.status === 'done' ? '✅' : n.status === 'failed' ? '❌' : '⏭'} ${id}`)
|
|
499
|
+
.join('\n');
|
|
500
|
+
|
|
501
|
+
const msg = `${icon} Pipeline ${pipeline.id} (${pipeline.workflowName}) ${pipeline.status}\n\n${nodes}`;
|
|
502
|
+
|
|
503
|
+
try {
|
|
504
|
+
await fetch(`https://api.telegram.org/bot${settings.telegramBotToken}/sendMessage`, {
|
|
505
|
+
method: 'POST',
|
|
506
|
+
headers: { 'Content-Type': 'application/json' },
|
|
507
|
+
body: JSON.stringify({
|
|
508
|
+
chat_id: settings.telegramChatId.split(',')[0].trim(),
|
|
509
|
+
text: msg,
|
|
510
|
+
disable_web_page_preview: true,
|
|
511
|
+
}),
|
|
512
|
+
});
|
|
513
|
+
} catch {}
|
|
514
|
+
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@aion0/forge",
|
|
3
|
-
"version": "0.2.2",
|
|
3
|
+
"version": "0.2.3",
|
|
4
4
|
"description": "Unified AI workflow platform — multi-model task orchestration, persistent sessions, web terminal, remote access",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"scripts": {
|
|
@@ -34,6 +34,7 @@
|
|
|
34
34
|
"@auth/core": "^0.34.3",
|
|
35
35
|
"@xterm/addon-fit": "^0.11.0",
|
|
36
36
|
"@xterm/xterm": "^6.0.0",
|
|
37
|
+
"@xyflow/react": "^12.10.1",
|
|
37
38
|
"ai": "^6.0.116",
|
|
38
39
|
"better-sqlite3": "^12.6.2",
|
|
39
40
|
"next": "^16.1.6",
|