@aion0/forge 0.4.3 → 0.4.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,52 @@
1
+ import { useState, useRef, useCallback } from 'react';
2
+
3
+ interface UseSidebarResizeOptions {
4
+ defaultWidth?: number;
5
+ minWidth?: number;
6
+ maxWidth?: number;
7
+ }
8
+
9
+ /**
10
+ * Provides drag-to-resize behaviour for a vertical split panel sidebar.
11
+ *
12
+ * Usage:
13
+ * const { sidebarWidth, onSidebarDragStart } = useSidebarResize({ defaultWidth: 224 });
14
+ *
15
+ * <aside style={{ width: sidebarWidth }} className="flex flex-col shrink-0 overflow-hidden">…</aside>
16
+ * <div onMouseDown={onSidebarDragStart} className="w-1 cursor-col-resize shrink-0 bg-[var(--border)] hover:bg-[var(--accent)]/50" />
17
+ * <main className="flex-1 min-w-0">…</main>
18
+ */
19
+ export function useSidebarResize({
20
+ defaultWidth = 224,
21
+ minWidth = 120,
22
+ maxWidth = 480,
23
+ }: UseSidebarResizeOptions = {}) {
24
+ const [sidebarWidth, setSidebarWidth] = useState(defaultWidth);
25
+ // Track the in-progress drag without causing re-renders in the move handler
26
+ const dragRef = useRef<{ startX: number; startW: number } | null>(null);
27
+ // Keep a mutable copy so the stable onSidebarDragStart callback always reads the latest width
28
+ const widthRef = useRef(defaultWidth);
29
+
30
+ const onSidebarDragStart = useCallback((e: React.MouseEvent) => {
31
+ e.preventDefault();
32
+ dragRef.current = { startX: e.clientX, startW: widthRef.current };
33
+
34
+ const onMove = (ev: MouseEvent) => {
35
+ if (!dragRef.current) return;
36
+ const next = Math.max(minWidth, Math.min(maxWidth, dragRef.current.startW + ev.clientX - dragRef.current.startX));
37
+ widthRef.current = next;
38
+ setSidebarWidth(next);
39
+ };
40
+
41
+ const onUp = () => {
42
+ dragRef.current = null;
43
+ window.removeEventListener('mousemove', onMove);
44
+ window.removeEventListener('mouseup', onUp);
45
+ };
46
+
47
+ window.addEventListener('mousemove', onMove);
48
+ window.addEventListener('mouseup', onUp);
49
+ }, [minWidth, maxWidth]);
50
+
51
+ return { sidebarWidth, onSidebarDragStart };
52
+ }
@@ -50,15 +50,12 @@ nodes:
50
50
 
51
51
  ## Built-in Workflows
52
52
 
53
- ### issue-auto-fix
54
- Fetches a GitHub issue → fixes code on new branch → creates PR.
53
+ ### issue-fix-and-review
54
+ Complete issue resolution pipeline: fetch issue → fix code → create PR → review code → notify.
55
55
 
56
- Input: `issue_id`, `project`, `base_branch` (optional)
56
+ Steps: setup → fetch-issue → fix-code → push-and-pr → review → cleanup
57
57
 
58
- ### pr-review
59
- Fetches PR diff → AI code review → posts result.
60
-
61
- Input: `pr_number`, `project`
58
+ Input: `issue_id`, `project`, `base_branch` (optional), `extra_context` (optional)
62
59
 
63
60
  ## CLI
64
61
 
@@ -67,6 +64,24 @@ forge flows # list available workflows
67
64
  forge run my-workflow # execute a workflow
68
65
  ```
69
66
 
67
+ ## Import a Workflow
68
+
69
+ 1. In Pipelines tab, click **Import**
70
+ 2. Paste YAML workflow content
71
+ 3. Click **Save Workflow**
72
+
73
+ Or save YAML directly to `~/.forge/data/flows/<name>.yaml`.
74
+
75
+ To create a workflow via Help AI: ask "Create a pipeline that does X" — the AI will generate the YAML for you to import.
76
+
77
+ ## Creating Workflows via API
78
+
79
+ ```bash
80
+ curl -X POST http://localhost:3000/api/pipelines \
81
+ -H 'Content-Type: application/json' \
82
+ -d '{"action": "save-workflow", "yaml": "name: my-flow\nnodes:\n step1:\n project: my-project\n prompt: do something"}'
83
+ ```
84
+
70
85
  ## Storage
71
86
 
72
87
  - Workflow YAML: `~/.forge/data/flows/`
@@ -28,7 +28,7 @@ Scan → Fetch Issue → Fix Code (new branch) → Push → Create PR → Auto R
28
28
  1. **Scan**: `gh issue list` finds open issues matching labels
29
29
  2. **Fix**: Claude Code analyzes issue and fixes code on `fix/<id>-<description>` branch
30
30
  3. **PR**: Pushes branch and creates Pull Request
31
- 4. **Review**: Automatically triggers `pr-review` pipeline
31
+ 4. **Review**: AI reviews the code changes in the same pipeline
32
32
  5. **Notify**: Results sent via Telegram (if configured)
33
33
 
34
34
  ## Manual Trigger
package/lib/init.ts CHANGED
@@ -95,7 +95,13 @@ export function ensureInitialized() {
95
95
  // Session watcher is safe (file-based, idempotent)
96
96
  startWatcherLoop();
97
97
 
98
- // Issue scanner — auto-scan GitHub issues for configured projects
98
+ // Pipeline scheduler — periodic execution for project-bound workflows
99
+ try {
100
+ const { startScheduler } = require('./pipeline-scheduler');
101
+ startScheduler();
102
+ } catch {}
103
+
104
+ // Legacy issue scanner (still used if issue_autofix_config has entries)
99
105
  try {
100
106
  const { startScanner } = require('./issue-scanner');
101
107
  startScanner();
@@ -1,6 +1,6 @@
1
1
  /**
2
2
  * Issue Scanner — periodically scans GitHub issues for configured projects
3
- * and triggers issue-auto-fix pipeline for new issues.
3
+ * and triggers issue-fix-and-review pipeline for new issues.
4
4
  *
5
5
  * Per-project config stored in DB:
6
6
  * - enabled: boolean
@@ -196,7 +196,7 @@ export function scanAndTrigger(config: IssueAutofixConfig): { triggered: number;
196
196
  if (isProcessed(config.projectPath, issue.number)) continue;
197
197
 
198
198
  try {
199
- const pipeline = startPipeline('issue-auto-fix', {
199
+ const pipeline = startPipeline('issue-fix-and-review', {
200
200
  issue_id: String(issue.number),
201
201
  project: config.projectName,
202
202
  base_branch: config.baseBranch || 'auto-detect',
@@ -0,0 +1,239 @@
1
+ /**
2
+ * Pipeline Scheduler — manages project-pipeline bindings and scheduled execution.
3
+ * Replaces issue-scanner with a generic approach.
4
+ *
5
+ * Each project can bind multiple workflows. Each binding has:
6
+ * - config: JSON with workflow-specific settings (e.g. interval, labels for issue pipelines)
7
+ * - enabled: on/off toggle
8
+ * - scheduled execution via config.interval (minutes, 0 = manual only)
9
+ */
10
+
11
+ import { getDb } from '@/src/core/db/database';
12
+ import { getDbPath } from '@/src/config';
13
+ import { startPipeline, getPipeline } from './pipeline';
14
+ import { randomUUID } from 'node:crypto';
15
+
16
+ function db() { return getDb(getDbPath()); }
17
+
18
+ export interface ProjectPipelineBinding {
19
+ id: number;
20
+ projectPath: string;
21
+ projectName: string;
22
+ workflowName: string;
23
+ enabled: boolean;
24
+ config: Record<string, any>; // interval (minutes), labels, baseBranch, etc.
25
+ lastRunAt: string | null;
26
+ createdAt: string;
27
+ }
28
+
29
+ export interface PipelineRun {
30
+ id: string;
31
+ projectPath: string;
32
+ workflowName: string;
33
+ pipelineId: string;
34
+ status: string;
35
+ summary: string;
36
+ createdAt: string;
37
+ }
38
+
39
+ // ─── Bindings CRUD ───────────────────────────────────────
40
+
41
+ export function getBindings(projectPath: string): ProjectPipelineBinding[] {
42
+ return (db().prepare('SELECT * FROM project_pipelines WHERE project_path = ? ORDER BY created_at ASC')
43
+ .all(projectPath) as any[]).map(r => ({
44
+ id: r.id,
45
+ projectPath: r.project_path,
46
+ projectName: r.project_name,
47
+ workflowName: r.workflow_name,
48
+ enabled: !!r.enabled,
49
+ config: JSON.parse(r.config || '{}'),
50
+ lastRunAt: r.last_run_at || null,
51
+ createdAt: r.created_at,
52
+ }));
53
+ }
54
+
55
+ export function getAllScheduledBindings(): ProjectPipelineBinding[] {
56
+ return (db().prepare('SELECT * FROM project_pipelines WHERE enabled = 1')
57
+ .all() as any[]).map(r => ({
58
+ id: r.id,
59
+ projectPath: r.project_path,
60
+ projectName: r.project_name,
61
+ workflowName: r.workflow_name,
62
+ enabled: true,
63
+ config: JSON.parse(r.config || '{}'),
64
+ lastRunAt: r.last_run_at || null,
65
+ createdAt: r.created_at,
66
+ })).filter(b => b.config.interval && b.config.interval > 0);
67
+ }
68
+
69
+ export function addBinding(projectPath: string, projectName: string, workflowName: string, config?: Record<string, any>): void {
70
+ db().prepare(`
71
+ INSERT OR REPLACE INTO project_pipelines (project_path, project_name, workflow_name, config)
72
+ VALUES (?, ?, ?, ?)
73
+ `).run(projectPath, projectName, workflowName, JSON.stringify(config || {}));
74
+ }
75
+
76
+ export function removeBinding(projectPath: string, workflowName: string): void {
77
+ db().prepare('DELETE FROM project_pipelines WHERE project_path = ? AND workflow_name = ?')
78
+ .run(projectPath, workflowName);
79
+ }
80
+
81
+ export function updateBinding(projectPath: string, workflowName: string, updates: { enabled?: boolean; config?: Record<string, any> }): void {
82
+ if (updates.enabled !== undefined) {
83
+ db().prepare('UPDATE project_pipelines SET enabled = ? WHERE project_path = ? AND workflow_name = ?')
84
+ .run(updates.enabled ? 1 : 0, projectPath, workflowName);
85
+ }
86
+ if (updates.config) {
87
+ db().prepare('UPDATE project_pipelines SET config = ? WHERE project_path = ? AND workflow_name = ?')
88
+ .run(JSON.stringify(updates.config), projectPath, workflowName);
89
+ }
90
+ }
91
+
92
+ function updateLastRunAt(projectPath: string, workflowName: string): void {
93
+ db().prepare('UPDATE project_pipelines SET last_run_at = ? WHERE project_path = ? AND workflow_name = ?')
94
+ .run(new Date().toISOString(), projectPath, workflowName);
95
+ }
96
+
97
+ // ─── Runs ────────────────────────────────────────────────
98
+
99
+ export function recordRun(projectPath: string, workflowName: string, pipelineId: string): string {
100
+ const id = randomUUID().slice(0, 8);
101
+ db().prepare(`
102
+ INSERT INTO pipeline_runs (id, project_path, workflow_name, pipeline_id, status)
103
+ VALUES (?, ?, ?, ?, 'running')
104
+ `).run(id, projectPath, workflowName, pipelineId);
105
+ return id;
106
+ }
107
+
108
+ export function updateRun(pipelineId: string, status: string, summary?: string): void {
109
+ if (summary) {
110
+ db().prepare('UPDATE pipeline_runs SET status = ?, summary = ? WHERE pipeline_id = ?')
111
+ .run(status, summary, pipelineId);
112
+ } else {
113
+ db().prepare('UPDATE pipeline_runs SET status = ? WHERE pipeline_id = ?')
114
+ .run(status, pipelineId);
115
+ }
116
+ }
117
+
118
+ export function getRuns(projectPath: string, workflowName?: string, limit = 20): PipelineRun[] {
119
+ const query = workflowName
120
+ ? 'SELECT * FROM pipeline_runs WHERE project_path = ? AND workflow_name = ? ORDER BY created_at DESC LIMIT ?'
121
+ : 'SELECT * FROM pipeline_runs WHERE project_path = ? ORDER BY created_at DESC LIMIT ?';
122
+ const params = workflowName ? [projectPath, workflowName, limit] : [projectPath, limit];
123
+ return (db().prepare(query).all(...params) as any[]).map(r => ({
124
+ id: r.id,
125
+ projectPath: r.project_path,
126
+ workflowName: r.workflow_name,
127
+ pipelineId: r.pipeline_id,
128
+ status: r.status,
129
+ summary: r.summary || '',
130
+ createdAt: r.created_at,
131
+ }));
132
+ }
133
+
134
+ export function deleteRun(id: string): void {
135
+ db().prepare('DELETE FROM pipeline_runs WHERE id = ?').run(id);
136
+ }
137
+
138
+ // ─── Trigger ─────────────────────────────────────────────
139
+
140
+ export function triggerPipeline(projectPath: string, projectName: string, workflowName: string, extraInput?: Record<string, any>): { pipelineId: string; runId: string } {
141
+ const input: Record<string, string> = {
142
+ project: projectName,
143
+ ...extraInput,
144
+ };
145
+
146
+ const pipeline = startPipeline(workflowName, input);
147
+ const runId = recordRun(projectPath, workflowName, pipeline.id);
148
+ updateLastRunAt(projectPath, workflowName);
149
+ console.log(`[pipeline-scheduler] Triggered ${workflowName} for ${projectName} (pipeline: ${pipeline.id})`);
150
+ return { pipelineId: pipeline.id, runId };
151
+ }
152
+
153
+ // ─── Status sync (called from pipeline completion) ───────
154
+
155
+ export function syncRunStatus(pipelineId: string): void {
156
+ const pipeline = getPipeline(pipelineId);
157
+ if (!pipeline) return;
158
+
159
+ // Build summary from outputs
160
+ let summary = '';
161
+ for (const [nodeId, node] of Object.entries(pipeline.nodes)) {
162
+ if (node.outputs && Object.keys(node.outputs).length > 0) {
163
+ for (const [key, val] of Object.entries(node.outputs)) {
164
+ if (val && typeof val === 'string' && val.length < 500) {
165
+ summary += `${nodeId}.${key}: ${val.slice(0, 200)}\n`;
166
+ }
167
+ }
168
+ }
169
+ }
170
+
171
+ updateRun(pipelineId, pipeline.status, summary.trim());
172
+ }
173
+
174
+ // ─── Periodic Scheduler ─────────────────────────────────
175
+
176
+ const schedulerKey = Symbol.for('forge-pipeline-scheduler');
177
+ const gAny = globalThis as any;
178
+ if (!gAny[schedulerKey]) gAny[schedulerKey] = { started: false, timer: null as NodeJS.Timeout | null };
179
+ const schedulerState = gAny[schedulerKey] as { started: boolean; timer: NodeJS.Timeout | null };
180
+
181
+ const CHECK_INTERVAL_MS = 60 * 1000; // check every 60s
182
+
183
+ export function startScheduler(): void {
184
+ if (schedulerState.started) return;
185
+ schedulerState.started = true;
186
+
187
+ // Check on startup after a short delay
188
+ setTimeout(() => tickScheduler(), 5000);
189
+
190
+ // Then check periodically
191
+ schedulerState.timer = setInterval(() => tickScheduler(), CHECK_INTERVAL_MS);
192
+ console.log('[pipeline-scheduler] Scheduler started (checking every 60s)');
193
+ }
194
+
195
+ export function stopScheduler(): void {
196
+ if (schedulerState.timer) {
197
+ clearInterval(schedulerState.timer);
198
+ schedulerState.timer = null;
199
+ }
200
+ schedulerState.started = false;
201
+ }
202
+
203
+ function tickScheduler(): void {
204
+ try {
205
+ const bindings = getAllScheduledBindings();
206
+ const now = Date.now();
207
+
208
+ for (const binding of bindings) {
209
+ const intervalMs = binding.config.interval * 60 * 1000;
210
+ const lastRun = binding.lastRunAt ? new Date(binding.lastRunAt).getTime() : 0;
211
+ const elapsed = now - lastRun;
212
+
213
+ if (elapsed >= intervalMs) {
214
+ // Check if there's already a running pipeline for this binding
215
+ const recentRuns = getRuns(binding.projectPath, binding.workflowName, 1);
216
+ if (recentRuns.length > 0 && recentRuns[0].status === 'running') {
217
+ continue; // skip if still running
218
+ }
219
+
220
+ try {
221
+ console.log(`[pipeline-scheduler] Scheduled trigger: ${binding.workflowName} for ${binding.projectName}`);
222
+ triggerPipeline(binding.projectPath, binding.projectName, binding.workflowName, binding.config.input);
223
+ } catch (e: any) {
224
+ console.error(`[pipeline-scheduler] Scheduled trigger failed for ${binding.workflowName}:`, e.message);
225
+ }
226
+ }
227
+ }
228
+ } catch (e: any) {
229
+ console.error('[pipeline-scheduler] Tick error:', e.message);
230
+ }
231
+ }
232
+
233
+ /** Get next scheduled run time for a binding */
234
+ export function getNextRunTime(binding: ProjectPipelineBinding): string | null {
235
+ if (!binding.enabled || !binding.config.interval || binding.config.interval <= 0) return null;
236
+ const intervalMs = binding.config.interval * 60 * 1000;
237
+ const lastRun = binding.lastRunAt ? new Date(binding.lastRunAt).getTime() : new Date(binding.createdAt).getTime();
238
+ return new Date(lastRun + intervalMs).toISOString();
239
+ }
package/lib/pipeline.ts CHANGED
@@ -75,9 +75,9 @@ export interface Pipeline {
75
75
  // ─── Built-in workflows ──────────────────────────────────
76
76
 
77
77
  export const BUILTIN_WORKFLOWS: Record<string, string> = {
78
- 'issue-auto-fix': `
79
- name: issue-auto-fix
80
- description: "Fetch a GitHub issue → fix code on a new branch → create PR"
78
+ 'issue-fix-and-review': `
79
+ name: issue-fix-and-review
80
+ description: "Fetch GitHub issue → fix code → create PR → review PR → notify"
81
81
  input:
82
82
  issue_id: "GitHub issue number"
83
83
  project: "Project name"
@@ -107,8 +107,10 @@ nodes:
107
107
  project: "{{input.project}}"
108
108
  depends_on: [setup]
109
109
  prompt: |
110
+ ISSUE_ID="{{input.issue_id}}" && \
111
+ if [ -z "$ISSUE_ID" ]; then echo "__SKIP__ No issue_id provided" && exit 0; fi && \
110
112
  REPO=$(echo '{{nodes.setup.outputs.info}}' | grep REPO= | cut -d= -f2) && \
111
- gh issue view {{input.issue_id}} --json title,body,labels,number -R "$REPO"
113
+ gh issue view "$ISSUE_ID" --json title,body,labels,number -R "$REPO"
112
114
  outputs:
113
115
  - name: issue_json
114
116
  extract: stdout
@@ -148,72 +150,43 @@ nodes:
148
150
  outputs:
149
151
  - name: pr_url
150
152
  extract: stdout
151
- notify:
152
- mode: shell
153
- project: "{{input.project}}"
154
- depends_on: [push-and-pr]
155
- prompt: |
156
- ORIG=$(echo '{{nodes.setup.outputs.info}}' | grep ORIG_BRANCH= | cut -d= -f2) && \
157
- if [ -n "$(git status --porcelain)" ]; then
158
- echo "PR created for issue #{{input.issue_id}}: {{nodes.push-and-pr.outputs.pr_url}} (staying on $(git branch --show-current) - uncommitted changes)"
159
- else
160
- git checkout "$ORIG" 2>/dev/null || true
161
- echo "PR created for issue #{{input.issue_id}}: {{nodes.push-and-pr.outputs.pr_url}} (switched back to $ORIG)"
162
- fi
163
- `,
164
- 'pr-review': `
165
- name: pr-review
166
- description: "Review a PR → approve or request changes → notify"
167
- input:
168
- pr_number: "Pull request number"
169
- project: "Project name"
170
- nodes:
171
- setup:
172
- mode: shell
173
- project: "{{input.project}}"
174
- prompt: |
175
- REPO=$(gh repo view --json nameWithOwner -q .nameWithOwner 2>/dev/null || git remote get-url origin | sed 's/.*github.com[:/]//;s/.git$//') && \
176
- echo "REPO=$REPO"
177
- outputs:
178
- - name: info
179
- extract: stdout
180
- fetch-pr:
181
- mode: shell
182
- project: "{{input.project}}"
183
- depends_on: [setup]
184
- prompt: |
185
- REPO=$(echo '{{nodes.setup.outputs.info}}' | grep REPO= | cut -d= -f2) && \
186
- gh pr diff {{input.pr_number}} -R "$REPO"
187
- outputs:
188
- - name: diff
189
- extract: stdout
190
153
  review:
191
154
  project: "{{input.project}}"
192
- depends_on: [fetch-pr]
155
+ depends_on: [push-and-pr]
193
156
  prompt: |
194
- Review the following pull request diff carefully. Check for:
157
+ Review the code changes for issue #{{input.issue_id}}.
158
+
159
+ Fix summary: {{nodes.fix-code.outputs.summary}}
160
+
161
+ Git diff:
162
+ {{nodes.fix-code.outputs.diff}}
163
+
164
+ Check for:
195
165
  - Bugs and logic errors
196
166
  - Security vulnerabilities
197
167
  - Performance issues
198
- - Code style and best practices
199
-
200
- PR #{{input.pr_number}} diff:
201
- {{nodes.fetch-pr.outputs.diff}}
168
+ - Whether the fix actually addresses the issue
202
169
 
203
170
  Respond with:
204
171
  1. APPROVED or CHANGES_REQUESTED
205
- 2. Detailed list of specific issues found with file paths and line numbers
206
- 3. Suggestions for improvement
172
+ 2. Specific issues found with file paths and line numbers
207
173
  outputs:
208
174
  - name: review_result
209
175
  extract: result
210
- post-review:
176
+ cleanup:
211
177
  mode: shell
212
178
  project: "{{input.project}}"
213
179
  depends_on: [review]
214
- prompt: "echo 'Review complete for PR #{{input.pr_number}}'"
180
+ prompt: |
181
+ ORIG=$(echo '{{nodes.setup.outputs.info}}' | grep ORIG_BRANCH= | cut -d= -f2) && \
182
+ if [ -n "$(git status --porcelain)" ]; then
183
+ echo "Issue #{{input.issue_id}} — PR: {{nodes.push-and-pr.outputs.pr_url}} | Review: {{nodes.review.outputs.review_result}} (staying on $(git branch --show-current))"
184
+ else
185
+ git checkout "$ORIG" 2>/dev/null || true
186
+ echo "Issue #{{input.issue_id}} — PR: {{nodes.push-and-pr.outputs.pr_url}} | Review: {{nodes.review.outputs.review_result}} (switched back to $ORIG)"
187
+ fi
215
188
  outputs:
216
- - name: status
189
+ - name: result
217
190
  extract: stdout
218
191
  `,
219
192
  };
@@ -607,39 +580,11 @@ function checkPipelineCompletion(pipeline: Pipeline) {
607
580
  savePipeline(pipeline);
608
581
  notifyPipelineComplete(pipeline);
609
582
 
610
- // Update issue_autofix_processed status
611
- if (pipeline.workflowName === 'issue-auto-fix') {
612
- try {
613
- const { updateProcessedStatus } = require('./issue-scanner');
614
- const issueId = parseInt(pipeline.input.issue_id);
615
- const projectInfo = getProjectInfo(pipeline.input.project);
616
- if (projectInfo && issueId) {
617
- const prOutput = pipeline.nodes['push-and-pr']?.outputs?.pr_url || '';
618
- const prMatch = prOutput.match(/\/pull\/(\d+)/);
619
- const prNumber = prMatch ? parseInt(prMatch[1]) : undefined;
620
- updateProcessedStatus(projectInfo.path, issueId, pipeline.status, prNumber);
621
- }
622
- } catch {}
623
- }
624
-
625
- // Auto-chain: issue-auto-fix → pr-review
626
- if (pipeline.workflowName === 'issue-auto-fix' && pipeline.status === 'done') {
627
- try {
628
- // Extract PR number from push-and-pr output
629
- const prOutput = pipeline.nodes['push-and-pr']?.outputs?.pr_url || '';
630
- const prMatch = prOutput.match(/\/pull\/(\d+)/);
631
- if (prMatch) {
632
- const prNumber = prMatch[1];
633
- console.log(`[pipeline] Auto-triggering pr-review for PR #${prNumber}`);
634
- startPipeline('pr-review', {
635
- pr_number: prNumber,
636
- project: pipeline.input.project || '',
637
- });
638
- }
639
- } catch (e) {
640
- console.error('[pipeline] Failed to auto-trigger pr-review:', e);
641
- }
642
- }
583
+ // Sync run status to project pipeline runs
584
+ try {
585
+ const { syncRunStatus } = require('./pipeline-scheduler');
586
+ syncRunStatus(pipeline.id);
587
+ } catch {}
643
588
 
644
589
  // Release project lock
645
590
  const workflow = getWorkflow(pipeline.workflowName);
@@ -695,6 +640,17 @@ function setupTaskListener(pipelineId: string) {
695
640
  }
696
641
  }
697
642
 
643
+ // Convention: if stdout contains __SKIP__, mark node as skipped (downstream nodes will also skip)
644
+ const outputStr = task.resultSummary || '';
645
+ if (outputStr.includes('__SKIP__')) {
646
+ nodeState.status = 'skipped';
647
+ nodeState.completedAt = new Date().toISOString();
648
+ savePipeline(pipeline);
649
+ scheduleReadyNodes(pipeline, workflow);
650
+ checkPipelineCompletion(pipeline);
651
+ return;
652
+ }
653
+
698
654
  // Check routes for conditional next step
699
655
  if (nodeDef.routes.length > 0) {
700
656
  const nextNode = evaluateRoutes(nodeDef.routes, nodeState.outputs, pipeline);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aion0/forge",
3
- "version": "0.4.3",
3
+ "version": "0.4.5",
4
4
  "description": "Unified AI workflow platform — multi-model task orchestration, persistent sessions, web terminal, remote access",
5
5
  "type": "module",
6
6
  "scripts": {
@@ -34,6 +34,7 @@ function initSchema(db: Database.Database) {
34
34
  migrate("ALTER TABLE skills ADD COLUMN installed_version TEXT NOT NULL DEFAULT ''");
35
35
  migrate('ALTER TABLE skills ADD COLUMN rating REAL DEFAULT 0');
36
36
  migrate('ALTER TABLE skills ADD COLUMN deleted_remotely INTEGER NOT NULL DEFAULT 0');
37
+ migrate('ALTER TABLE project_pipelines ADD COLUMN last_run_at TEXT');
37
38
 
38
39
  db.exec(`
39
40
  CREATE TABLE IF NOT EXISTS sessions (
@@ -155,6 +156,29 @@ function initSchema(db: Database.Database) {
155
156
  data TEXT NOT NULL DEFAULT '{}'
156
157
  );
157
158
 
159
+ -- Project pipeline bindings (which workflows are attached to which projects)
160
+ CREATE TABLE IF NOT EXISTS project_pipelines (
161
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
162
+ project_path TEXT NOT NULL,
163
+ project_name TEXT NOT NULL,
164
+ workflow_name TEXT NOT NULL,
165
+ enabled INTEGER NOT NULL DEFAULT 1,
166
+ config TEXT NOT NULL DEFAULT '{}',
167
+ created_at TEXT NOT NULL DEFAULT (datetime('now')),
168
+ UNIQUE(project_path, workflow_name)
169
+ );
170
+
171
+ -- Pipeline execution log (per project, replaces issue_autofix_processed)
172
+ CREATE TABLE IF NOT EXISTS pipeline_runs (
173
+ id TEXT PRIMARY KEY,
174
+ project_path TEXT NOT NULL,
175
+ workflow_name TEXT NOT NULL,
176
+ pipeline_id TEXT NOT NULL,
177
+ status TEXT NOT NULL DEFAULT 'running',
178
+ summary TEXT,
179
+ created_at TEXT NOT NULL DEFAULT (datetime('now'))
180
+ );
181
+
158
182
  -- Project favorites
159
183
  CREATE TABLE IF NOT EXISTS project_favorites (
160
184
  project_path TEXT PRIMARY KEY,