@aion0/forge 0.4.5 → 0.4.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/RELEASE_NOTES.md CHANGED
@@ -1,11 +1,16 @@
1
- # Forge v0.4.5
1
+ # Forge v0.4.7
2
2
 
3
- Released: 2026-03-21
3
+ Released: 2026-03-22
4
4
 
5
- ## Changes since v0.4.4
5
+ ## Changes since v0.4.6
6
6
 
7
7
  ### Bug Fixes
8
- - fix: add drag-to-resize for left sidebars across all split-panel pages (#14)
8
+ - fix: serial issue scanning, shell ANSI-C escaping, pipeline navigation
9
+ - fix: prevent concurrent startTunnel calls from killing each other (#16)
9
10
 
11
+ ### Other
12
+ - improve pipeline documentation, issue-scan configuration UI, and run history display
13
+ - fix(#17): normalize SQLite datetime strings to ISO 8601 UTC
10
14
 
11
- **Full Changelog**: https://github.com/aiwatching/forge/compare/v0.4.4...v0.4.5
15
+
16
+ **Full Changelog**: https://github.com/aiwatching/forge/compare/v0.4.6...v0.4.7
@@ -8,6 +8,8 @@ import {
8
8
  deleteRun,
9
9
  triggerPipeline,
10
10
  getNextRunTime,
11
+ scanAndTriggerIssues,
12
+ resetDedup,
11
13
  } from '@/lib/pipeline-scheduler';
12
14
  import { listWorkflows } from '@/lib/pipeline';
13
15
 
@@ -64,5 +66,26 @@ export async function POST(req: Request) {
64
66
  return NextResponse.json({ ok: true });
65
67
  }
66
68
 
69
+ if (body.action === 'scan-now') {
70
+ const { projectPath, projectName, workflowName } = body;
71
+ if (!projectPath || !workflowName) return NextResponse.json({ error: 'projectPath and workflowName required' }, { status: 400 });
72
+ const bindings = getBindings(projectPath);
73
+ const binding = bindings.find(b => b.workflowName === workflowName);
74
+ if (!binding) return NextResponse.json({ error: 'Binding not found' }, { status: 404 });
75
+ try {
76
+ const result = scanAndTriggerIssues(binding);
77
+ return NextResponse.json({ ok: true, ...result });
78
+ } catch (e: any) {
79
+ return NextResponse.json({ ok: false, error: e.message }, { status: 500 });
80
+ }
81
+ }
82
+
83
+ if (body.action === 'reset-dedup') {
84
+ const { projectPath, workflowName, dedupKey } = body;
85
+ if (!projectPath || !workflowName || !dedupKey) return NextResponse.json({ error: 'projectPath, workflowName, dedupKey required' }, { status: 400 });
86
+ resetDedup(projectPath, workflowName, dedupKey);
87
+ return NextResponse.json({ ok: true });
88
+ }
89
+
67
90
  return NextResponse.json({ error: 'Invalid action' }, { status: 400 });
68
91
  }
@@ -101,6 +101,18 @@ export default function Dashboard({ user }: { user: any }) {
101
101
  return () => window.removeEventListener('forge:open-terminal', handler);
102
102
  }, []);
103
103
 
104
+ // Listen for navigation events (e.g. from ProjectDetail → Pipelines)
105
+ const [pendingPipelineId, setPendingPipelineId] = useState<string | null>(null);
106
+ useEffect(() => {
107
+ const handler = (e: Event) => {
108
+ const { view, pipelineId } = (e as CustomEvent).detail;
109
+ if (view) setViewMode(view);
110
+ if (pipelineId) setPendingPipelineId(pipelineId);
111
+ };
112
+ window.addEventListener('forge:navigate', handler);
113
+ return () => window.removeEventListener('forge:navigate', handler);
114
+ }, []);
115
+
104
116
  // Version check (on mount + every 10 min)
105
117
  useEffect(() => {
106
118
  const check = () => fetch('/api/version').then(r => r.json()).then(setVersionInfo).catch(() => {});
@@ -561,7 +573,11 @@ export default function Dashboard({ user }: { user: any }) {
561
573
  {/* Pipelines */}
562
574
  {viewMode === 'pipelines' && (
563
575
  <Suspense fallback={<div className="flex-1 flex items-center justify-center text-[var(--text-secondary)]">Loading...</div>}>
564
- <PipelineView onViewTask={(taskId) => { setViewMode('tasks'); setActiveTaskId(taskId); }} />
576
+ <PipelineView
577
+ onViewTask={(taskId) => { setViewMode('tasks'); setActiveTaskId(taskId); }}
578
+ focusPipelineId={pendingPipelineId}
579
+ onFocusHandled={() => setPendingPipelineId(null)}
580
+ />
565
581
  </Suspense>
566
582
  )}
567
583
 
@@ -64,7 +64,7 @@ const STATUS_COLOR: Record<string, string> = {
64
64
  skipped: 'text-gray-500',
65
65
  };
66
66
 
67
- export default function PipelineView({ onViewTask }: { onViewTask?: (taskId: string) => void }) {
67
+ export default function PipelineView({ onViewTask, focusPipelineId, onFocusHandled }: { onViewTask?: (taskId: string) => void; focusPipelineId?: string | null; onFocusHandled?: () => void }) {
68
68
  const { sidebarWidth, onSidebarDragStart } = useSidebarResize({ defaultWidth: 256, minWidth: 140, maxWidth: 480 });
69
69
  const [pipelines, setPipelines] = useState<Pipeline[]>([]);
70
70
  const [workflows, setWorkflows] = useState<Workflow[]>([]);
@@ -100,6 +100,17 @@ export default function PipelineView({ onViewTask }: { onViewTask?: (taskId: str
100
100
  return () => clearInterval(timer);
101
101
  }, [fetchData]);
102
102
 
103
+ // Focus on a specific pipeline (from external navigation)
104
+ useEffect(() => {
105
+ if (!focusPipelineId || pipelines.length === 0) return;
106
+ const target = pipelines.find(p => p.id === focusPipelineId);
107
+ if (target) {
108
+ setSelectedPipeline(target);
109
+ setShowEditor(false);
110
+ onFocusHandled?.();
111
+ }
112
+ }, [focusPipelineId, pipelines, onFocusHandled]);
113
+
103
114
  // Refresh selected pipeline
104
115
  useEffect(() => {
105
116
  if (!selectedPipeline || selectedPipeline.status !== 'running') return;
@@ -69,10 +69,12 @@ export default memo(function ProjectDetail({ projectPath, projectName, hasGit }:
69
69
  const [projectTab, setProjectTab] = useState<'code' | 'skills' | 'claudemd' | 'pipelines'>('code');
70
70
  // Pipeline bindings state
71
71
  const [pipelineBindings, setPipelineBindings] = useState<{ id: number; workflowName: string; enabled: boolean; config: any; lastRunAt: string | null; nextRunAt: string | null }[]>([]);
72
- const [pipelineRuns, setPipelineRuns] = useState<{ id: string; workflowName: string; pipelineId: string; status: string; summary: string; createdAt: string }[]>([]);
72
+ const [pipelineRuns, setPipelineRuns] = useState<{ id: string; workflowName: string; pipelineId: string; status: string; summary: string; dedupKey: string | null; createdAt: string }[]>([]);
73
73
  const [availableWorkflows, setAvailableWorkflows] = useState<{ name: string; description?: string; builtin?: boolean }[]>([]);
74
74
  const [showAddPipeline, setShowAddPipeline] = useState(false);
75
75
  const [triggerInput, setTriggerInput] = useState<Record<string, string>>({});
76
+ const [runMenu, setRunMenu] = useState<string | null>(null); // workflowName of open run menu
77
+ const [issueInput, setIssueInput] = useState('');
76
78
  const [claudeMdContent, setClaudeMdContent] = useState('');
77
79
  const [claudeMdExists, setClaudeMdExists] = useState(false);
78
80
  const [claudeTemplates, setClaudeTemplates] = useState<{ id: string; name: string; description: string; tags: string[]; builtin: boolean; content: string }[]>([]);
@@ -846,10 +848,74 @@ export default memo(function ProjectDetail({ projectPath, projectName, hasGit }:
846
848
  }} className="accent-[var(--accent)]" />
847
849
  Enabled
848
850
  </label>
849
- <button
850
- onClick={() => triggerProjectPipeline(b.workflowName, triggerInput)}
851
- className="text-[9px] px-2 py-0.5 border border-[var(--accent)] text-[var(--accent)] rounded hover:bg-[var(--accent)] hover:text-white"
852
- >Run</button>
851
+ <div className="relative">
852
+ <button
853
+ onClick={() => {
854
+ const isIssueWf = b.workflowName === 'issue-auto-fix' || b.workflowName === 'issue-fix-and-review';
855
+ if (!isIssueWf) {
856
+ triggerProjectPipeline(b.workflowName, triggerInput);
857
+ } else {
858
+ setRunMenu(runMenu === b.workflowName ? null : b.workflowName);
859
+ setIssueInput('');
860
+ }
861
+ }}
862
+ className="text-[9px] px-2 py-0.5 border border-[var(--accent)] text-[var(--accent)] rounded hover:bg-[var(--accent)] hover:text-white"
863
+ >Run</button>
864
+ {runMenu === b.workflowName && (
865
+ <div className="absolute top-full right-0 mt-1 z-20 bg-[var(--bg-secondary)] border border-[var(--border)] rounded shadow-lg p-2 space-y-2 w-[200px]">
866
+ <button
867
+ onClick={async () => {
868
+ setRunMenu(null);
869
+ try {
870
+ const res = await fetch('/api/project-pipelines', {
871
+ method: 'POST',
872
+ headers: { 'Content-Type': 'application/json' },
873
+ body: JSON.stringify({ action: 'scan-now', projectPath, projectName, workflowName: b.workflowName }),
874
+ });
875
+ const data = await res.json();
876
+ if (data.error) alert(`Scan error: ${data.error}`);
877
+ else alert(`Scanned ${data.total} issues, triggered ${data.triggered} fix${data.pending > 0 ? ` (${data.pending} more pending)` : ''}`);
878
+ fetchPipelineBindings();
879
+ } catch { alert('Scan failed'); }
880
+ }}
881
+ className="w-full text-[9px] px-2 py-1.5 rounded border border-green-500/50 text-green-400 hover:bg-green-500/10 font-medium"
882
+ >Auto Scan — fix all new issues</button>
883
+ <div className="border-t border-[var(--border)]/50 my-1" />
884
+ <div className="flex items-center gap-1">
885
+ <input
886
+ type="text"
887
+ value={issueInput}
888
+ onChange={e => setIssueInput(e.target.value)}
889
+ placeholder="Issue #"
890
+ className="flex-1 bg-[var(--bg-tertiary)] border border-[var(--border)] rounded px-2 py-1 text-[9px] text-[var(--text-primary)]"
891
+ onKeyDown={e => {
892
+ if (e.key === 'Enter' && issueInput.trim()) {
893
+ setRunMenu(null);
894
+ triggerProjectPipeline(b.workflowName, {
895
+ ...triggerInput,
896
+ issue_id: issueInput.trim(),
897
+ base_branch: b.config.baseBranch || 'auto-detect',
898
+ });
899
+ }
900
+ }}
901
+ autoFocus
902
+ />
903
+ <button
904
+ onClick={() => {
905
+ if (!issueInput.trim()) return;
906
+ setRunMenu(null);
907
+ triggerProjectPipeline(b.workflowName, {
908
+ ...triggerInput,
909
+ issue_id: issueInput.trim(),
910
+ base_branch: b.config.baseBranch || 'auto-detect',
911
+ });
912
+ }}
913
+ className="text-[9px] px-2 py-1 bg-[var(--accent)] text-white rounded hover:opacity-80"
914
+ >Fix</button>
915
+ </div>
916
+ </div>
917
+ )}
918
+ </div>
853
919
  <button
854
920
  onClick={async () => {
855
921
  if (!confirm(`Remove "${b.workflowName}" from this project?`)) return;
@@ -900,6 +966,51 @@ export default memo(function ProjectDetail({ projectPath, projectName, hasGit }:
900
966
  </span>
901
967
  )}
902
968
  </div>
969
+ {/* Issue scan config (for issue-fix-and-review workflow) */}
970
+ {(b.workflowName === 'issue-auto-fix' || b.workflowName === 'issue-fix-and-review') && (
971
+ <div className="space-y-1.5 pt-1 border-t border-[var(--border)]/30">
972
+ {b.config.interval > 0 && (
973
+ <div className="text-[8px] text-[var(--text-secondary)]">
974
+ Scheduled mode: auto-scans GitHub issues and fixes new ones
975
+ </div>
976
+ )}
977
+ <div className="flex items-center gap-2 text-[9px]">
978
+ <label className="text-[var(--text-secondary)]">Labels:</label>
979
+ <input
980
+ type="text"
981
+ defaultValue={(b.config.labels || []).join(', ')}
982
+ placeholder="bug, autofix (empty = all)"
983
+ onBlur={async (e) => {
984
+ const labels = e.target.value.split(',').map((s: string) => s.trim()).filter(Boolean);
985
+ const newConfig = { ...b.config, labels };
986
+ await fetch('/api/project-pipelines', {
987
+ method: 'POST',
988
+ headers: { 'Content-Type': 'application/json' },
989
+ body: JSON.stringify({ action: 'update', projectPath, workflowName: b.workflowName, config: newConfig }),
990
+ });
991
+ fetchPipelineBindings();
992
+ }}
993
+ className="flex-1 bg-[var(--bg-secondary)] border border-[var(--border)] rounded px-1.5 py-0.5 text-[9px] text-[var(--text-primary)]"
994
+ />
995
+ <label className="text-[var(--text-secondary)]">Base:</label>
996
+ <input
997
+ type="text"
998
+ defaultValue={b.config.baseBranch || ''}
999
+ placeholder="auto-detect"
1000
+ onBlur={async (e) => {
1001
+ const newConfig = { ...b.config, baseBranch: e.target.value.trim() || undefined };
1002
+ await fetch('/api/project-pipelines', {
1003
+ method: 'POST',
1004
+ headers: { 'Content-Type': 'application/json' },
1005
+ body: JSON.stringify({ action: 'update', projectPath, workflowName: b.workflowName, config: newConfig }),
1006
+ });
1007
+ fetchPipelineBindings();
1008
+ }}
1009
+ className="w-20 bg-[var(--bg-secondary)] border border-[var(--border)] rounded px-1.5 py-0.5 text-[9px] text-[var(--text-primary)]"
1010
+ />
1011
+ </div>
1012
+ </div>
1013
+ )}
903
1014
  </div>
904
1015
  ))
905
1016
  )}
@@ -913,30 +1024,58 @@ export default memo(function ProjectDetail({ projectPath, projectName, hasGit }:
913
1024
  {pipelineRuns.map(run => (
914
1025
  <div key={run.id} className="flex items-start gap-2 px-3 py-2 border-b border-[var(--border)]/30 last:border-b-0 text-[10px]">
915
1026
  <span className={`shrink-0 ${
916
- run.status === 'done' ? 'text-green-400' : run.status === 'failed' ? 'text-red-400' : 'text-yellow-400'
1027
+ run.status === 'done' ? 'text-green-400' : run.status === 'failed' ? 'text-red-400' : run.status === 'skipped' ? 'text-gray-400' : 'text-yellow-400'
917
1028
  }`}>●</span>
918
1029
  <div className="flex-1 min-w-0">
919
1030
  <div className="flex items-center gap-2">
920
1031
  <span className="text-[var(--text-primary)] font-medium">{run.workflowName}</span>
921
- <span className="text-[8px] text-[var(--text-secondary)] font-mono">{run.pipelineId.slice(0, 8)}</span>
1032
+ {run.dedupKey && (
1033
+ <span className="text-[8px] text-[var(--accent)] font-mono">{run.dedupKey.replace('issue:', '#')}</span>
1034
+ )}
1035
+ <button
1036
+ onClick={() => window.dispatchEvent(new CustomEvent('forge:navigate', { detail: { view: 'pipelines', pipelineId: run.pipelineId } }))}
1037
+ className="text-[8px] text-[var(--accent)] font-mono hover:underline"
1038
+ title="View in Pipelines"
1039
+ >{run.pipelineId.slice(0, 8)}</button>
922
1040
  <span className="text-[8px] text-[var(--text-secondary)] ml-auto">{new Date(run.createdAt).toLocaleString([], { month: 'short', day: 'numeric', hour: '2-digit', minute: '2-digit' })}</span>
923
1041
  </div>
924
1042
  {run.summary && (
925
1043
  <pre className="text-[9px] text-[var(--text-secondary)] mt-1 whitespace-pre-wrap break-words line-clamp-3">{run.summary}</pre>
926
1044
  )}
927
1045
  </div>
928
- <button
929
- onClick={async () => {
930
- if (!confirm('Delete this run?')) return;
931
- await fetch('/api/project-pipelines', {
932
- method: 'POST',
933
- headers: { 'Content-Type': 'application/json' },
934
- body: JSON.stringify({ action: 'delete-run', id: run.id }),
935
- });
936
- fetchPipelineBindings();
937
- }}
938
- className="text-[8px] text-[var(--text-secondary)] hover:text-[var(--red)] shrink-0"
939
- >×</button>
1046
+ <div className="flex items-center gap-1 shrink-0">
1047
+ {run.status === 'failed' && run.dedupKey && (
1048
+ <button
1049
+ onClick={async () => {
1050
+ await fetch('/api/project-pipelines', {
1051
+ method: 'POST',
1052
+ headers: { 'Content-Type': 'application/json' },
1053
+ body: JSON.stringify({ action: 'reset-dedup', projectPath, workflowName: run.workflowName, dedupKey: run.dedupKey }),
1054
+ });
1055
+ // Delete the failed run record so a future scan can pick the issue up again
1056
+ await fetch('/api/project-pipelines', {
1057
+ method: 'POST',
1058
+ headers: { 'Content-Type': 'application/json' },
1059
+ body: JSON.stringify({ action: 'delete-run', id: run.id }),
1060
+ });
1061
+ fetchPipelineBindings();
1062
+ }}
1063
+ className="text-[8px] text-[var(--accent)] hover:underline"
1064
+ >Retry</button>
1065
+ )}
1066
+ <button
1067
+ onClick={async () => {
1068
+ if (!confirm('Delete this run?')) return;
1069
+ await fetch('/api/project-pipelines', {
1070
+ method: 'POST',
1071
+ headers: { 'Content-Type': 'application/json' },
1072
+ body: JSON.stringify({ action: 'delete-run', id: run.id }),
1073
+ });
1074
+ fetchPipelineBindings();
1075
+ }}
1076
+ className="text-[8px] text-[var(--text-secondary)] hover:text-[var(--red)]"
1077
+ >×</button>
1078
+ </div>
940
1079
  </div>
941
1080
  ))}
942
1081
  </div>
@@ -171,8 +171,9 @@ function pushLog(line: string) {
171
171
 
172
172
  export async function startTunnel(localPort: number = parseInt(process.env.PORT || '3000')): Promise<{ url?: string; error?: string }> {
173
173
  console.log(`[tunnel] Starting tunnel on port ${localPort}...`);
174
- // Check if this worker already has a process
175
- if (state.process) {
174
+ // Prevent concurrent starts: either a process was already spawned (state.process set),
175
+ // or another call is mid-flight between this guard and spawn (the async download window).
176
+ if (state.process || state.status === 'starting') {
176
177
  return state.url ? { url: state.url } : { error: 'Tunnel is starting...' };
177
178
  }
178
179
 
@@ -182,6 +183,13 @@ export async function startTunnel(localPort: number = parseInt(process.env.PORT
182
183
  try { process.kill(saved.pid, 0); return { url: saved.url }; } catch {}
183
184
  }
184
185
 
186
+ // Claim 'starting' before any async work so concurrent callers are blocked
187
+ // from this point onward (pgrep kill + download can take seconds).
188
+ state.status = 'starting';
189
+ state.url = null;
190
+ state.error = null;
191
+ state.log = [];
192
+
185
193
  // Kill ALL existing cloudflared processes to prevent duplicates
186
194
  try {
187
195
  const { execSync } = require('node:child_process');
@@ -191,11 +199,6 @@ export async function startTunnel(localPort: number = parseInt(process.env.PORT
191
199
  }
192
200
  } catch {}
193
201
 
194
- state.status = 'starting';
195
- state.url = null;
196
- state.error = null;
197
- state.log = [];
198
-
199
202
  // Generate new session code for remote login 2FA
200
203
  try {
201
204
  const { rotateSessionCode } = require('./password');
@@ -2,7 +2,7 @@
2
2
 
3
3
  ## What Are Pipelines?
4
4
 
5
- Pipelines chain multiple tasks into a DAG (directed acyclic graph). Each step can depend on previous steps, pass outputs forward, and run in parallel.
5
+ Pipelines chain multiple tasks into a DAG (directed acyclic graph). Each step can depend on previous steps, pass outputs forward, and run in parallel. Pipelines are defined as YAML workflow files.
6
6
 
7
7
  ## YAML Workflow Format
8
8
 
@@ -10,9 +10,10 @@ Pipelines chain multiple tasks into a DAG (directed acyclic graph). Each step ca
10
10
  name: my-workflow
11
11
  description: "What this workflow does"
12
12
  input:
13
- feature: "Feature description"
13
+ feature: "Feature description" # required input fields
14
+ priority: "Priority level (optional)"
14
15
  vars:
15
- project: my-app
16
+ project: my-app # default variables
16
17
  nodes:
17
18
  design:
18
19
  project: "{{vars.project}}"
@@ -23,7 +24,10 @@ nodes:
23
24
  implement:
24
25
  project: "{{vars.project}}"
25
26
  depends_on: [design]
26
- prompt: "Implement: {{nodes.design.outputs.spec}}"
27
+ prompt: "Implement based on: {{nodes.design.outputs.spec}}"
28
+ outputs:
29
+ - name: diff
30
+ extract: git_diff
27
31
  review:
28
32
  project: "{{vars.project}}"
29
33
  depends_on: [implement]
@@ -32,30 +36,252 @@ nodes:
32
36
 
33
37
  ## Node Options
34
38
 
35
- | Field | Description |
36
- |-------|-------------|
37
- | `project` | Project name (supports `{{vars.xxx}}` templates) |
38
- | `prompt` | Claude Code prompt or shell command |
39
- | `mode` | `claude` (default) or `shell` |
40
- | `branch` | Auto-checkout branch before running |
41
- | `depends_on` | List of node IDs that must complete first |
42
- | `outputs` | Extract results: `result`, `git_diff`, or `stdout` |
43
- | `routes` | Conditional routing to next nodes |
39
+ | Field | Description | Default |
40
+ |-------|-------------|---------|
41
+ | `project` | Project name (supports templates) | required |
42
+ | `prompt` | Claude Code prompt or shell command | required |
43
+ | `mode` | `claude` (AI agent) or `shell` (raw command) | `claude` |
44
+ | `branch` | Auto-checkout branch before running (supports templates) | none |
45
+ | `depends_on` | List of node IDs that must complete first | `[]` |
46
+ | `outputs` | Extract results (see Output Extraction) | `[]` |
47
+ | `routes` | Conditional routing to next nodes (see Routing) | `[]` |
48
+ | `max_iterations` | Max loop iterations for routed nodes | `3` |
49
+
50
+ ## Node Modes
51
+
52
+ ### `claude` (default)
53
+ Runs the prompt via Claude Code (`claude -p`). The AI agent reads the codebase, makes changes, and returns a result.
54
+
55
+ ### `shell`
56
+ Runs the prompt as a raw shell command (`bash -c "..."`). Useful for git operations, CLI tools, API calls, etc.
57
+
58
+ ```yaml
59
+ nodes:
60
+ setup:
61
+ mode: shell
62
+ project: my-app
63
+ prompt: |
64
+ git checkout main && git pull && echo "READY"
65
+ outputs:
66
+ - name: info
67
+ extract: stdout
68
+ ```
69
+
70
+ **Shell escaping**: Template values in shell mode are automatically escaped (single quotes `'` → `'\''`) to prevent injection.
44
71
 
45
72
  ## Template Variables
46
73
 
47
- - `{{input.xxx}}` pipeline input values
48
- - `{{vars.xxx}}` — workflow variables
49
- - `{{nodes.xxx.outputs.yyy}}` — outputs from previous nodes
74
+ Templates use `{{...}}` syntax and are resolved before execution:
75
+
76
+ - `{{input.xxx}}` — pipeline input values provided at trigger time
77
+ - `{{vars.xxx}}` — workflow-level variables defined in YAML
78
+ - `{{nodes.<node_id>.outputs.<output_name>}}` — outputs from completed nodes
79
+
80
+ Node IDs can contain hyphens (e.g., `{{nodes.fetch-issue.outputs.data}}`).
81
+
82
+ ### Examples
83
+
84
+ ```yaml
85
+ prompt: "Fix issue #{{input.issue_id}} in {{input.project}}"
86
+ prompt: "Based on: {{nodes.design.outputs.spec}}"
87
+ prompt: |
88
+ REPO={{nodes.setup.outputs.repo}} && \
89
+ gh pr create --title "Fix #{{input.issue_id}}" -R "$REPO"
90
+ ```
91
+
92
+ ## Output Extraction
93
+
94
+ Each node can extract outputs for downstream nodes:
95
+
96
+ | Extract Type | Description |
97
+ |-------------|-------------|
98
+ | `result` | Claude's final response text |
99
+ | `stdout` | Shell command stdout (same as result for shell mode) |
100
+ | `git_diff` | Git diff of changes made during the task |
101
+
102
+ ```yaml
103
+ outputs:
104
+ - name: summary
105
+ extract: result
106
+ - name: changes
107
+ extract: git_diff
108
+ ```
109
+
110
+ ## Skip Convention (`__SKIP__`)
111
+
112
+ If a shell node outputs `__SKIP__` in its stdout and exits with code 0, the node is marked as `skipped` instead of `done`. All downstream dependent nodes are also skipped. The pipeline completes successfully (not failed).
113
+
114
+ ```yaml
115
+ nodes:
116
+ check:
117
+ mode: shell
118
+ project: my-app
119
+ prompt: |
120
+ if [ -z "{{input.issue_id}}" ]; then
121
+ echo "__SKIP__ No issue_id provided"
122
+ exit 0
123
+ fi
124
+ echo "Processing issue {{input.issue_id}}"
125
+ ```
126
+
127
+ Use this for optional steps that should gracefully skip when preconditions aren't met.
128
+
129
+ ## Conditional Routing
130
+
131
+ Nodes can route to different next steps based on output content:
132
+
133
+ ```yaml
134
+ nodes:
135
+ analyze:
136
+ project: my-app
137
+ prompt: "Analyze the issue. Reply SIMPLE or COMPLEX."
138
+ outputs:
139
+ - name: complexity
140
+ extract: result
141
+ routes:
142
+ - condition: "{{outputs.complexity contains 'SIMPLE'}}"
143
+ next: quick-fix
144
+ - condition: default
145
+ next: deep-fix
146
+ quick-fix:
147
+ depends_on: [analyze]
148
+ project: my-app
149
+ prompt: "Apply a quick fix"
150
+ deep-fix:
151
+ depends_on: [analyze]
152
+ project: my-app
153
+ prompt: "Do a thorough analysis and fix"
154
+ ```
155
+
156
+ ### Route Conditions
157
+
158
+ - `{{outputs.<name> contains '<keyword>'}}` — check if output contains a keyword
159
+ - `default` — fallback route (always matches)
160
+
161
+ ### Loops
162
+
163
+ If a route points back to the same node, it creates a loop (up to `max_iterations`):
164
+
165
+ ```yaml
166
+ nodes:
167
+ fix-and-test:
168
+ project: my-app
169
+ prompt: "Fix the failing test, then run tests."
170
+ max_iterations: 5
171
+ outputs:
172
+ - name: test_result
173
+ extract: result
174
+ routes:
175
+ - condition: "{{outputs.test_result contains 'PASS'}}"
176
+ next: done
177
+ - condition: default
178
+ next: fix-and-test # loop back to retry
179
+ done:
180
+ depends_on: [fix-and-test]
181
+ mode: shell
182
+ project: my-app
183
+ prompt: "echo 'All tests passing!'"
184
+ ```
185
+
186
+ ## Branch Auto-checkout
187
+
188
+ Nodes can auto-checkout a git branch before execution:
189
+
190
+ ```yaml
191
+ nodes:
192
+ work:
193
+ project: my-app
194
+ branch: "feature/{{input.feature_name}}"
195
+ prompt: "Implement the feature"
196
+ ```
197
+
198
+ ## Parallel Execution
199
+
200
+ Nodes without dependency relationships run in parallel:
201
+
202
+ ```yaml
203
+ nodes:
204
+ frontend:
205
+ project: my-app
206
+ prompt: "Build frontend component"
207
+ backend:
208
+ project: my-app
209
+ prompt: "Build API endpoint"
210
+ integration:
211
+ depends_on: [frontend, backend] # waits for both
212
+ project: my-app
213
+ prompt: "Integration test"
214
+ ```
215
+
216
+ `frontend` and `backend` run simultaneously; `integration` starts when both finish.
50
217
 
51
218
  ## Built-in Workflows
52
219
 
53
220
  ### issue-fix-and-review
54
- Complete issue resolution pipeline: fetch issue → fix code create PRreview code → notify.
221
+ Complete issue resolution: fetch GitHub issue → fix code on new branchcreate PR.
222
+
223
+ **Input**: `issue_id`, `project`, `base_branch` (optional), `extra_context` (optional)
224
+
225
+ **Steps**: setup → fetch-issue → fix-code → push-and-pr → notify
226
+
227
+ **Prerequisites**: `gh` CLI installed and authenticated (`gh auth login`), project has GitHub remote.
228
+
229
+ ### pr-review
230
+ Review a pull request: fetch PR diff → AI review → report.
231
+
232
+ **Input**: `pr_number`, `project`
233
+
234
+ **Steps**: setup → fetch-pr → review → post-review
55
235
 
56
- Steps: setup fetch-issue → fix-code → push-and-pr → review → cleanup
236
+ ## Project Pipeline Bindings
57
237
 
58
- Input: `issue_id`, `project`, `base_branch` (optional), `extra_context` (optional)
238
+ Projects can bind workflows for easy access and scheduled execution.
239
+
240
+ ### Binding a Workflow to a Project
241
+
242
+ 1. Go to **Projects → select project → Pipelines tab**
243
+ 2. Click **+ Add** to attach a workflow
244
+ 3. Configure:
245
+ - **Enabled**: toggle on/off
246
+ - **Schedule**: Manual only, or periodic (15min to 24h intervals)
247
+ 4. Click **Run** to manually trigger
248
+
249
+ ### Scheduled Execution
250
+
251
+ When a schedule is set (e.g., "Every 30 min"):
252
+ - The scheduler checks all bindings every 60 seconds
253
+ - If the interval has elapsed since last run, the pipeline triggers automatically
254
+ - Running pipelines are not re-triggered (prevents overlap)
255
+ - `Last run` and `Next run` times are shown in the UI
256
+
257
+ Schedule options: Manual only, 15min, 30min, 1h, 2h, 6h, 12h, 24h.
258
+
259
+ ### API
260
+
261
+ ```bash
262
+ # List bindings + runs + workflows for a project
263
+ curl "http://localhost:3000/api/project-pipelines?project=/path/to/project"
264
+
265
+ # Add binding
266
+ curl -X POST http://localhost:3000/api/project-pipelines \
267
+ -H 'Content-Type: application/json' \
268
+ -d '{"action":"add","projectPath":"/path","projectName":"my-app","workflowName":"issue-fix-and-review"}'
269
+
270
+ # Update binding (enable/disable, change config/schedule)
271
+ curl -X POST http://localhost:3000/api/project-pipelines \
272
+ -H 'Content-Type: application/json' \
273
+ -d '{"action":"update","projectPath":"/path","workflowName":"issue-fix-and-review","config":{"interval":30}}'
274
+
275
+ # Trigger pipeline manually
276
+ curl -X POST http://localhost:3000/api/project-pipelines \
277
+ -H 'Content-Type: application/json' \
278
+ -d '{"action":"trigger","projectPath":"/path","projectName":"my-app","workflowName":"issue-fix-and-review","input":{"issue_id":"42"}}'
279
+
280
+ # Remove binding
281
+ curl -X POST http://localhost:3000/api/project-pipelines \
282
+ -H 'Content-Type: application/json' \
283
+ -d '{"action":"remove","projectPath":"/path","workflowName":"issue-fix-and-review"}'
284
+ ```
59
285
 
60
286
  ## CLI
61
287
 
@@ -66,7 +292,7 @@ forge run my-workflow # execute a workflow
66
292
 
67
293
  ## Import a Workflow
68
294
 
69
- 1. In Pipelines tab, click **Import**
295
+ 1. In **Pipelines** tab, click **Import**
70
296
  2. Paste YAML workflow content
71
297
  3. Click **Save Workflow**
72
298
 
@@ -79,10 +305,25 @@ To create a workflow via Help AI: ask "Create a pipeline that does X" — the AI
79
305
  ```bash
80
306
  curl -X POST http://localhost:3000/api/pipelines \
81
307
  -H 'Content-Type: application/json' \
82
- -d '{"action": "save-workflow", "yaml": "name: my-flow\nnodes:\n step1:\n project: my-project\n prompt: do something"}'
308
+ -d '{"action": "save-workflow", "yaml": "<yaml content>"}'
83
309
  ```
84
310
 
311
+ ## Pipeline Model
312
+
313
+ In **Settings → Pipeline Model**, you can select which Claude model runs pipeline tasks. Set to `default` to use the same model as regular tasks.
314
+
85
315
  ## Storage
86
316
 
87
317
  - Workflow YAML: `~/.forge/data/flows/`
88
318
  - Execution state: `~/.forge/data/pipelines/`
319
+ - Binding config & run history: SQLite database (`~/.forge/data/forge.db`)
320
+
321
+ ## Tips for Writing Workflows
322
+
323
+ 1. **Start with shell nodes** for setup (git checkout, environment checks)
324
+ 2. **Use `__SKIP__`** for optional steps with precondition checks
325
+ 3. **Extract outputs** to pass data between nodes
326
+ 4. **Use routes** for conditional logic (simple/complex paths, retry loops)
327
+ 5. **Keep prompts focused** — each node should do one thing well
328
+ 6. **Test manually first** before setting up schedules
329
+ 7. **Use `depends_on`** to control execution order; nodes without dependencies run in parallel
@@ -2,7 +2,7 @@
2
2
 
3
3
  ## Overview
4
4
 
5
- Automatically scan GitHub Issues, fix code, create PRs, and review — all hands-free.
5
+ Automatically scan GitHub Issues, fix code, create PRs — all hands-free. Uses the built-in `issue-fix-and-review` pipeline workflow with integrated issue scanning.
6
6
 
7
7
  ## Prerequisites
8
8
 
@@ -11,41 +11,45 @@ Automatically scan GitHub Issues, fix code, create PRs, and review — all hands
11
11
 
12
12
  ## Setup
13
13
 
14
- 1. Go to **Projects → select project → Issues tab**
15
- 2. Enable **Issue Auto-fix**
16
- 3. Configure:
17
- - **Scan Interval**: minutes between scans (0 = manual only)
18
- - **Base Branch**: leave empty for auto-detect (main/master)
19
- - **Labels Filter**: comma-separated labels (empty = all issues)
20
- 4. Click **Scan Now** to test
14
+ 1. Go to **Projects → select project → Pipelines tab**
15
+ 2. Click **+ Add** and select `issue-fix-and-review`
16
+ 3. Enable the binding
17
+ 4. Check **Auto-scan GitHub Issues** to enable automatic scanning
18
+ 5. Configure:
19
+ - **Schedule**: How often to scan (e.g., Every 30 min)
20
+ - **Labels**: Filter issues by label (comma-separated, empty = all)
21
+ - **Base Branch**: Leave empty for auto-detect (main/master)
22
+ 6. Click **Scan** to manually trigger a scan
21
23
 
22
24
  ## Flow
23
25
 
24
26
  ```
25
- Scan Fetch Issue Fix Code (new branch) → Push → Create PR → Auto Review → Notify
27
+ Scan IssuesFor each new issue:
28
+ Setup → Fetch Issue → Fix Code (new branch) → Push & Create PR → Notify
26
29
  ```
27
30
 
28
31
  1. **Scan**: `gh issue list` finds open issues matching labels
29
- 2. **Fix**: Claude Code analyzes issue and fixes code on `fix/<id>-<description>` branch
30
- 3. **PR**: Pushes branch and creates Pull Request
31
- 4. **Review**: AI reviews the code changes in the same pipeline
32
- 5. **Notify**: Results sent via Telegram (if configured)
32
+ 2. **Dedup**: Already-processed issues are skipped (tracked in `pipeline_runs`)
33
+ 3. **Setup**: Checks for clean working directory, detects repo and base branch
34
+ 4. **Fetch Issue**: `gh issue view` fetches issue data
35
+ 5. **Fix Code**: Claude analyzes issue and fixes code on `fix/<id>-<description>` branch
36
+ 6. **Push & PR**: Pushes branch and creates Pull Request via `gh pr create`
37
+ 7. **Notify**: Switches back to original branch, reports PR URL
33
38
 
34
39
  ## Manual Trigger
35
40
 
36
- Enter an issue number in "Manual Trigger" section and click "Fix Issue".
41
+ - **Run** button: Triggers the workflow with custom input (requires `issue_id`)
42
+ - **Scan** button: Scans for all open issues and triggers fixes for new ones
37
43
 
38
- ## Retry
44
+ ## Dedup
39
45
 
40
- Failed fixes show a "Retry" button. Click to provide additional context (e.g. "rebase from main first") and re-run.
46
+ Each processed issue is tracked with a `dedup_key` (e.g., `issue:42`) in the pipeline runs table. Once an issue has been processed, it won't be triggered again even if it's still open. To re-process an issue, delete its run from the execution history.
41
47
 
42
48
  ## Safety
43
49
 
44
50
  - Checks for uncommitted changes before starting (aborts if dirty)
45
- - Always works on new branches (never modifies main)
51
+ - Always works on new branches (never modifies main/master)
52
+ - Cleans up old fix branches for the same issue
46
53
  - Switches back to original branch after completion
47
- - Existing PRs are updated, not duplicated
48
-
49
- ## Processed Issues
50
-
51
- History shows all processed issues with status (processing/done/failed), PR number, and pipeline ID. Click pipeline ID to view details.
54
+ - Uses `--force-with-lease` for safe push
55
+ - Running pipelines are not re-triggered (one fix per issue at a time)
@@ -0,0 +1,41 @@
1
+ You are a help assistant for **Forge** — a self-hosted Vibe Coding platform.
2
+
3
+ Your job is to answer user questions about Forge features, configuration, and troubleshooting.
4
+
5
+ ## How to answer
6
+
7
+ 1. Read the relevant documentation file(s) from this directory before answering
8
+ 2. Base your answers on the documentation content, not assumptions
9
+ 3. If the answer isn't in the docs, say so honestly
10
+ 4. Give concise, actionable answers with code examples when helpful
11
+ 5. When generating files (YAML workflows, configs, scripts, etc.), **always save the file directly** to the appropriate directory rather than printing it. For pipeline workflows, save to `~/.forge/data/flows/<name>.yaml`. Tell the user the file path so they can find it. The terminal does not support copy/paste.
12
+
13
+ ## Available documentation
14
+
15
+ | File | Topic |
16
+ |------|-------|
17
+ | `00-overview.md` | Installation, startup, data paths, architecture |
18
+ | `01-settings.md` | All settings fields and configuration |
19
+ | `02-telegram.md` | Telegram bot setup and commands |
20
+ | `03-tunnel.md` | Remote access via Cloudflare tunnel |
21
+ | `04-tasks.md` | Background task system |
22
+ | `05-pipelines.md` | Pipeline/workflow engine — YAML format, nodes, templates, routing, scheduling, project bindings |
23
+ | `06-skills.md` | Skills marketplace and installation |
24
+ | `07-projects.md` | Project management |
25
+ | `08-rules.md` | CLAUDE.md templates and rule injection |
26
+ | `09-issue-autofix.md` | GitHub issue auto-fix pipeline |
27
+ | `10-troubleshooting.md` | Common issues and solutions |
28
+
29
+ ## Matching questions to docs
30
+
31
+ - Pipeline/workflow/DAG/YAML → `05-pipelines.md`
32
+ - Issue/PR/auto-fix → `09-issue-autofix.md` + `05-pipelines.md`
33
+ - Telegram/notification → `02-telegram.md`
34
+ - Tunnel/remote/cloudflare → `03-tunnel.md`
35
+ - Task/background/queue → `04-tasks.md`
36
+ - Settings/config → `01-settings.md`
37
+ - Install/start/update → `00-overview.md`
38
+ - Error/bug/crash → `10-troubleshooting.md`
39
+ - Skill/marketplace → `06-skills.md`
40
+ - Project/favorite → `07-projects.md`
41
+ - Rules/CLAUDE.md/template → `08-rules.md`
package/lib/init.ts CHANGED
@@ -95,18 +95,12 @@ export function ensureInitialized() {
95
95
  // Session watcher is safe (file-based, idempotent)
96
96
  startWatcherLoop();
97
97
 
98
- // Pipeline scheduler — periodic execution for project-bound workflows
98
+ // Pipeline scheduler — periodic execution + issue scanning for project-bound workflows
99
99
  try {
100
100
  const { startScheduler } = require('./pipeline-scheduler');
101
101
  startScheduler();
102
102
  } catch {}
103
103
 
104
- // Legacy issue scanner (still used if issue_autofix_config has entries)
105
- try {
106
- const { startScanner } = require('./issue-scanner');
107
- startScanner();
108
- } catch {}
109
-
110
104
  // If services are managed externally (forge-server), skip
111
105
  if (process.env.FORGE_EXTERNAL_SERVICES === '1') {
112
106
  // Password display
@@ -1,27 +1,36 @@
1
1
  /**
2
- * Pipeline Scheduler — manages project-pipeline bindings and scheduled execution.
3
- * Replaces issue-scanner with a generic approach.
2
+ * Pipeline Scheduler — manages project-pipeline bindings, scheduled execution,
3
+ * and issue scanning (replaces issue-scanner.ts).
4
4
  *
5
5
  * Each project can bind multiple workflows. Each binding has:
6
- * - config: JSON with workflow-specific settings (e.g. interval, labels for issue pipelines)
6
+ * - config: JSON with workflow-specific settings (interval, scanType, labels, baseBranch)
7
7
  * - enabled: on/off toggle
8
8
  * - scheduled execution via config.interval (minutes, 0 = manual only)
9
+ * - config.scanType: 'github-issues' enables automatic issue scanning + dedup
9
10
  */
10
11
 
11
12
  import { getDb } from '@/src/core/db/database';
12
13
  import { getDbPath } from '@/src/config';
13
14
  import { startPipeline, getPipeline } from './pipeline';
14
15
  import { randomUUID } from 'node:crypto';
16
+ import { execSync } from 'node:child_process';
15
17
 
16
18
  function db() { return getDb(getDbPath()); }
17
19
 
20
+ /** Normalize SQLite datetime('now') → ISO 8601 UTC string. */
21
+ function toIsoUTC(s: string | null): string | null {
22
+ if (!s) return null;
23
+ if (/^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$/.test(s)) return s.replace(' ', 'T') + 'Z';
24
+ return s;
25
+ }
26
+
18
27
  export interface ProjectPipelineBinding {
19
28
  id: number;
20
29
  projectPath: string;
21
30
  projectName: string;
22
31
  workflowName: string;
23
32
  enabled: boolean;
24
- config: Record<string, any>; // interval (minutes), labels, baseBranch, etc.
33
+ config: Record<string, any>; // interval, scanType, labels, baseBranch, etc.
25
34
  lastRunAt: string | null;
26
35
  createdAt: string;
27
36
  }
@@ -33,6 +42,7 @@ export interface PipelineRun {
33
42
  pipelineId: string;
34
43
  status: string;
35
44
  summary: string;
45
+ dedupKey: string | null;
36
46
  createdAt: string;
37
47
  }
38
48
 
@@ -47,8 +57,8 @@ export function getBindings(projectPath: string): ProjectPipelineBinding[] {
47
57
  workflowName: r.workflow_name,
48
58
  enabled: !!r.enabled,
49
59
  config: JSON.parse(r.config || '{}'),
50
- lastRunAt: r.last_run_at || null,
51
- createdAt: r.created_at,
60
+ lastRunAt: toIsoUTC(r.last_run_at),
61
+ createdAt: toIsoUTC(r.created_at) ?? r.created_at,
52
62
  }));
53
63
  }
54
64
 
@@ -61,8 +71,8 @@ export function getAllScheduledBindings(): ProjectPipelineBinding[] {
61
71
  workflowName: r.workflow_name,
62
72
  enabled: true,
63
73
  config: JSON.parse(r.config || '{}'),
64
- lastRunAt: r.last_run_at || null,
65
- createdAt: r.created_at,
74
+ lastRunAt: toIsoUTC(r.last_run_at),
75
+ createdAt: toIsoUTC(r.created_at) ?? r.created_at,
66
76
  })).filter(b => b.config.interval && b.config.interval > 0);
67
77
  }
68
78
 
@@ -96,12 +106,12 @@ function updateLastRunAt(projectPath: string, workflowName: string): void {
96
106
 
97
107
  // ─── Runs ────────────────────────────────────────────────
98
108
 
99
- export function recordRun(projectPath: string, workflowName: string, pipelineId: string): string {
109
+ export function recordRun(projectPath: string, workflowName: string, pipelineId: string, dedupKey?: string): string {
100
110
  const id = randomUUID().slice(0, 8);
101
111
  db().prepare(`
102
- INSERT INTO pipeline_runs (id, project_path, workflow_name, pipeline_id, status)
103
- VALUES (?, ?, ?, ?, 'running')
104
- `).run(id, projectPath, workflowName, pipelineId);
112
+ INSERT INTO pipeline_runs (id, project_path, workflow_name, pipeline_id, status, dedup_key)
113
+ VALUES (?, ?, ?, ?, 'running', ?)
114
+ `).run(id, projectPath, workflowName, pipelineId, dedupKey || null);
105
115
  return id;
106
116
  }
107
117
 
@@ -127,7 +137,8 @@ export function getRuns(projectPath: string, workflowName?: string, limit = 20):
127
137
  pipelineId: r.pipeline_id,
128
138
  status: r.status,
129
139
  summary: r.summary || '',
130
- createdAt: r.created_at,
140
+ dedupKey: r.dedup_key || null,
141
+ createdAt: toIsoUTC(r.created_at) ?? r.created_at,
131
142
  }));
132
143
  }
133
144
 
@@ -135,18 +146,36 @@ export function deleteRun(id: string): void {
135
146
  db().prepare('DELETE FROM pipeline_runs WHERE id = ?').run(id);
136
147
  }
137
148
 
149
+ // ─── Dedup ──────────────────────────────────────────────
150
+
151
+ function isDuplicate(projectPath: string, workflowName: string, dedupKey: string): boolean {
152
+ const row = db().prepare(
153
+ 'SELECT 1 FROM pipeline_runs WHERE project_path = ? AND workflow_name = ? AND dedup_key = ?'
154
+ ).get(projectPath, workflowName, dedupKey);
155
+ return !!row;
156
+ }
157
+
158
+ export function resetDedup(projectPath: string, workflowName: string, dedupKey: string): void {
159
+ db().prepare(
160
+ 'DELETE FROM pipeline_runs WHERE project_path = ? AND workflow_name = ? AND dedup_key = ?'
161
+ ).run(projectPath, workflowName, dedupKey);
162
+ }
163
+
138
164
  // ─── Trigger ─────────────────────────────────────────────
139
165
 
140
- export function triggerPipeline(projectPath: string, projectName: string, workflowName: string, extraInput?: Record<string, any>): { pipelineId: string; runId: string } {
166
+ export function triggerPipeline(
167
+ projectPath: string, projectName: string, workflowName: string,
168
+ extraInput?: Record<string, any>, dedupKey?: string
169
+ ): { pipelineId: string; runId: string } {
141
170
  const input: Record<string, string> = {
142
171
  project: projectName,
143
172
  ...extraInput,
144
173
  };
145
174
 
146
175
  const pipeline = startPipeline(workflowName, input);
147
- const runId = recordRun(projectPath, workflowName, pipeline.id);
176
+ const runId = recordRun(projectPath, workflowName, pipeline.id, dedupKey);
148
177
  updateLastRunAt(projectPath, workflowName);
149
- console.log(`[pipeline-scheduler] Triggered ${workflowName} for ${projectName} (pipeline: ${pipeline.id})`);
178
+ console.log(`[pipeline-scheduler] Triggered ${workflowName} for ${projectName} (pipeline: ${pipeline.id}${dedupKey ? ', dedup: ' + dedupKey : ''})`);
150
179
  return { pipelineId: pipeline.id, runId };
151
180
  }
152
181
 
@@ -171,6 +200,94 @@ export function syncRunStatus(pipelineId: string): void {
171
200
  updateRun(pipelineId, pipeline.status, summary.trim());
172
201
  }
173
202
 
203
+ // ─── GitHub Issue Scanning ──────────────────────────────
204
+
205
+ function getRepoFromProject(projectPath: string): string | null {
206
+ try {
207
+ return execSync('gh repo view --json nameWithOwner -q .nameWithOwner', {
208
+ cwd: projectPath, encoding: 'utf-8', timeout: 10000, stdio: ['pipe', 'pipe', 'pipe'],
209
+ }).trim() || null;
210
+ } catch {
211
+ try {
212
+ const url = execSync('git remote get-url origin', {
213
+ cwd: projectPath, encoding: 'utf-8', timeout: 5000, stdio: ['pipe', 'pipe', 'pipe'],
214
+ }).trim();
215
+ return url.replace(/.*github\.com[:/]/, '').replace(/\.git$/, '') || null;
216
+ } catch { return null; }
217
+ }
218
+ }
219
+
220
+ function fetchOpenIssues(projectPath: string, labels: string[]): { number: number; title: string; error?: string }[] {
221
+ const repo = getRepoFromProject(projectPath);
222
+ if (!repo) return [{ number: -1, title: '', error: 'Could not detect GitHub repo. Run: gh auth login' }];
223
+ try {
224
+ const labelFilter = labels.length > 0 ? ` --label "${labels.join(',')}"` : '';
225
+ const out = execSync(`gh issue list --state open --json number,title${labelFilter} -R ${repo}`, {
226
+ cwd: projectPath, encoding: 'utf-8', timeout: 15000, stdio: ['pipe', 'pipe', 'pipe'],
227
+ });
228
+ return JSON.parse(out) || [];
229
+ } catch (e: any) {
230
+ const msg = e.stderr?.toString() || e.message || 'gh CLI failed';
231
+ return [{ number: -1, title: '', error: msg.includes('auth') ? 'GitHub CLI not authenticated. Run: gh auth login' : msg }];
232
+ }
233
+ }
234
+
235
+ export function scanAndTriggerIssues(binding: ProjectPipelineBinding): { triggered: number; issues: number[]; total: number; pending: number; error?: string } {
236
+ const labels: string[] = binding.config.labels || [];
237
+ const issues = fetchOpenIssues(binding.projectPath, labels);
238
+
239
+ // Check for errors
240
+ if (issues.length === 1 && (issues[0] as any).error) {
241
+ return { triggered: 0, issues: [], total: 0, pending: 0, error: (issues[0] as any).error };
242
+ }
243
+
244
+ // Check if there's already a running pipeline for this project+workflow — only one at a time
245
+ // to prevent concurrent git operations on the same repo
246
+ const recentRuns = getRuns(binding.projectPath, binding.workflowName, 5);
247
+ const hasRunning = recentRuns.some(r => r.status === 'running');
248
+
249
+ const newIssues: { number: number; title: string }[] = [];
250
+ for (const issue of issues) {
251
+ if (issue.number < 0) continue;
252
+ const dedupKey = `issue:${issue.number}`;
253
+ if (!isDuplicate(binding.projectPath, binding.workflowName, dedupKey)) {
254
+ newIssues.push(issue);
255
+ }
256
+ }
257
+
258
+ if (newIssues.length === 0) {
259
+ updateLastRunAt(binding.projectPath, binding.workflowName);
260
+ return { triggered: 0, issues: [], total: issues.length, pending: 0 };
261
+ }
262
+
263
+ // Only trigger ONE issue at a time to avoid concurrent git conflicts
264
+ // Next issue will be triggered on the next scan cycle
265
+ if (hasRunning) {
266
+ console.log(`[pipeline-scheduler] Issue scan: ${newIssues.length} new issues for ${binding.projectName}, waiting for current pipeline to finish`);
267
+ return { triggered: 0, issues: [], total: issues.length, pending: newIssues.length };
268
+ }
269
+
270
+ const issue = newIssues[0];
271
+ const dedupKey = `issue:${issue.number}`;
272
+ try {
273
+ triggerPipeline(
274
+ binding.projectPath, binding.projectName, binding.workflowName,
275
+ {
276
+ issue_id: String(issue.number),
277
+ base_branch: binding.config.baseBranch || 'auto-detect',
278
+ },
279
+ dedupKey
280
+ );
281
+ console.log(`[pipeline-scheduler] Issue scan: triggered #${issue.number} "${issue.title}" for ${binding.projectName} (${newIssues.length - 1} more pending)`);
282
+ } catch (e: any) {
283
+ console.error(`[pipeline-scheduler] Issue scan: failed to trigger #${issue.number}:`, e.message);
284
+ return { triggered: 0, issues: [], total: issues.length, pending: newIssues.length, error: e.message };
285
+ }
286
+
287
+ updateLastRunAt(binding.projectPath, binding.workflowName);
288
+ return { triggered: 1, issues: [issue.number], total: issues.length, pending: newIssues.length - 1 };
289
+ }
290
+
174
291
  // ─── Periodic Scheduler ─────────────────────────────────
175
292
 
176
293
  const schedulerKey = Symbol.for('forge-pipeline-scheduler');
@@ -210,19 +327,24 @@ function tickScheduler(): void {
210
327
  const lastRun = binding.lastRunAt ? new Date(binding.lastRunAt).getTime() : 0;
211
328
  const elapsed = now - lastRun;
212
329
 
213
- if (elapsed >= intervalMs) {
214
- // Check if there's already a running pipeline for this binding
215
- const recentRuns = getRuns(binding.projectPath, binding.workflowName, 1);
216
- if (recentRuns.length > 0 && recentRuns[0].status === 'running') {
217
- continue; // skip if still running
218
- }
330
+ if (elapsed < intervalMs) continue;
331
+
332
+ try {
333
+ const isIssueWorkflow = binding.workflowName === 'issue-fix-and-review' || binding.workflowName === 'issue-auto-fix' || binding.config.scanType === 'github-issues';
334
+ if (isIssueWorkflow) {
335
+ // Issue scan mode: fetch issues → dedup → trigger per issue
336
+ console.log(`[pipeline-scheduler] Scheduled issue scan: ${binding.workflowName} for ${binding.projectName}`);
337
+ scanAndTriggerIssues(binding);
338
+ } else {
339
+ // Normal mode: single trigger (skip if still running)
340
+ const recentRuns = getRuns(binding.projectPath, binding.workflowName, 1);
341
+ if (recentRuns.length > 0 && recentRuns[0].status === 'running') continue;
219
342
 
220
- try {
221
343
  console.log(`[pipeline-scheduler] Scheduled trigger: ${binding.workflowName} for ${binding.projectName}`);
222
344
  triggerPipeline(binding.projectPath, binding.projectName, binding.workflowName, binding.config.input);
223
- } catch (e: any) {
224
- console.error(`[pipeline-scheduler] Scheduled trigger failed for ${binding.workflowName}:`, e.message);
225
345
  }
346
+ } catch (e: any) {
347
+ console.error(`[pipeline-scheduler] Scheduled trigger failed for ${binding.workflowName}:`, e.message);
226
348
  }
227
349
  }
228
350
  } catch (e: any) {
package/lib/pipeline.ts CHANGED
@@ -92,7 +92,7 @@ nodes:
92
92
  if [ -n "$(git status --porcelain)" ]; then echo "ERROR: Working directory has uncommitted changes. Please commit or stash first." && exit 1; fi && \
93
93
  ORIG_BRANCH=$(git branch --show-current || git rev-parse --short HEAD) && \
94
94
  REPO=$(gh repo view --json nameWithOwner -q .nameWithOwner 2>/dev/null || git remote get-url origin | sed 's/.*github.com[:/]//;s/.git$//') && \
95
- BASE={{input.base_branch}} && \
95
+ BASE="{{input.base_branch}}" && \
96
96
  if [ -z "$BASE" ] || [ "$BASE" = "auto-detect" ]; then BASE=$(git symbolic-ref refs/remotes/origin/HEAD 2>/dev/null | sed 's@^refs/remotes/origin/@@' || echo main); fi && \
97
97
  git checkout "$BASE" 2>/dev/null || true && \
98
98
  git pull origin "$BASE" 2>/dev/null || true && \
@@ -109,7 +109,8 @@ nodes:
109
109
  prompt: |
110
110
  ISSUE_ID="{{input.issue_id}}" && \
111
111
  if [ -z "$ISSUE_ID" ]; then echo "__SKIP__ No issue_id provided" && exit 0; fi && \
112
- REPO=$(echo '{{nodes.setup.outputs.info}}' | grep REPO= | cut -d= -f2) && \
112
+ SETUP_INFO=$'{{nodes.setup.outputs.info}}' && \
113
+ REPO=$(echo "$SETUP_INFO" | grep REPO= | cut -d= -f2) && \
113
114
  gh issue view "$ISSUE_ID" --json title,body,labels,number -R "$REPO"
114
115
  outputs:
115
116
  - name: issue_json
@@ -140,11 +141,12 @@ nodes:
140
141
  project: "{{input.project}}"
141
142
  depends_on: [fix-code]
142
143
  prompt: |
143
- REPO=$(echo '{{nodes.setup.outputs.info}}' | grep REPO= | cut -d= -f2) && \
144
+ SETUP_INFO=$'{{nodes.setup.outputs.info}}' && \
145
+ REPO=$(echo "$SETUP_INFO" | grep REPO= | cut -d= -f2) && \
144
146
  BRANCH=$(git branch --show-current) && \
145
147
  git push -u origin "$BRANCH" --force-with-lease 2>&1 && \
146
- PR_URL=$(gh pr create --title 'Fix #{{input.issue_id}}' \
147
- --body 'Auto-fix by Forge Pipeline for issue #{{input.issue_id}}.' -R "$REPO" 2>/dev/null || \
148
+ PR_URL=$(gh pr create --title "Fix #{{input.issue_id}}" \
149
+ --body "Auto-fix by Forge Pipeline for issue #{{input.issue_id}}." -R "$REPO" 2>/dev/null || \
148
150
  gh pr view "$BRANCH" --json url -q .url -R "$REPO" 2>/dev/null) && \
149
151
  echo "$PR_URL"
150
152
  outputs:
@@ -178,12 +180,14 @@ nodes:
178
180
  project: "{{input.project}}"
179
181
  depends_on: [review]
180
182
  prompt: |
181
- ORIG=$(echo '{{nodes.setup.outputs.info}}' | grep ORIG_BRANCH= | cut -d= -f2) && \
183
+ SETUP_INFO=$'{{nodes.setup.outputs.info}}' && \
184
+ ORIG=$(echo "$SETUP_INFO" | grep ORIG_BRANCH= | cut -d= -f2) && \
185
+ PR_URL=$'{{nodes.push-and-pr.outputs.pr_url}}' && \
182
186
  if [ -n "$(git status --porcelain)" ]; then
183
- echo "Issue #{{input.issue_id}} — PR: {{nodes.push-and-pr.outputs.pr_url}} | Review: {{nodes.review.outputs.review_result}} (staying on $(git branch --show-current))"
187
+ echo "Issue #{{input.issue_id}} — PR: $PR_URL (staying on $(git branch --show-current))"
184
188
  else
185
189
  git checkout "$ORIG" 2>/dev/null || true
186
- echo "Issue #{{input.issue_id}} — PR: {{nodes.push-and-pr.outputs.pr_url}} | Review: {{nodes.review.outputs.review_result}} (switched back to $ORIG)"
190
+ echo "Issue #{{input.issue_id}} — PR: $PR_URL (switched back to $ORIG)"
187
191
  fi
188
192
  outputs:
189
193
  - name: result
@@ -306,12 +310,22 @@ export function listPipelines(): Pipeline[] {
306
310
 
307
311
  // ─── Template Resolution ──────────────────────────────────
308
312
 
309
- /** Escape a string for safe embedding in shell commands (single-quote wrapping) */
313
+ /** Escape a string for safe embedding in single-quoted shell strings */
310
314
  function shellEscape(s: string): string {
311
315
  // Replace single quotes with '\'' (end quote, escaped quote, start quote)
312
316
  return s.replace(/'/g, "'\\''");
313
317
  }
314
318
 
319
+ /** Escape a string for safe embedding in $'...' shell strings (ANSI-C quoting) */
320
+ function shellEscapeAnsiC(s: string): string {
321
+ return s
322
+ .replace(/\\/g, '\\\\')
323
+ .replace(/'/g, "\\'")
324
+ .replace(/\n/g, '\\n')
325
+ .replace(/\r/g, '\\r')
326
+ .replace(/\t/g, '\\t');
327
+ }
328
+
315
329
  function resolveTemplate(template: string, ctx: {
316
330
  input: Record<string, string>;
317
331
  vars: Record<string, string>;
@@ -336,7 +350,7 @@ function resolveTemplate(template: string, ctx: {
336
350
  }
337
351
  }
338
352
 
339
- return shellMode ? shellEscape(value) : value;
353
+ return shellMode ? shellEscapeAnsiC(value) : value;
340
354
  });
341
355
  }
342
356
 
@@ -12,6 +12,13 @@ import { loadSettings } from './settings';
12
12
  import { notifyTaskComplete, notifyTaskFailed } from './notify';
13
13
  import type { Task, TaskLogEntry, TaskStatus, TaskMode, WatchConfig } from '@/src/types';
14
14
 
15
+ /** Normalize SQLite datetime('now') → ISO 8601 UTC string. */
16
+ function toIsoUTC(s: string | null | undefined): string | null {
17
+ if (!s) return null;
18
+ if (/^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$/.test(s)) return s.replace(' ', 'T') + 'Z';
19
+ return s;
20
+ }
21
+
15
22
  const runnerKey = Symbol.for('mw-task-runner');
16
23
  const gRunner = globalThis as any;
17
24
  if (!gRunner[runnerKey]) gRunner[runnerKey] = { runner: null, currentTaskId: null };
@@ -601,10 +608,10 @@ function rowToTask(row: any): Task {
601
608
  gitBranch: row.git_branch || undefined,
602
609
  costUSD: row.cost_usd || undefined,
603
610
  error: row.error || undefined,
604
- createdAt: row.created_at,
605
- startedAt: row.started_at || undefined,
606
- completedAt: row.completed_at || undefined,
607
- scheduledAt: row.scheduled_at || undefined,
611
+ createdAt: toIsoUTC(row.created_at) ?? row.created_at,
612
+ startedAt: toIsoUTC(row.started_at) ?? undefined,
613
+ completedAt: toIsoUTC(row.completed_at) ?? undefined,
614
+ scheduledAt: toIsoUTC(row.scheduled_at) ?? undefined,
608
615
  };
609
616
  }
610
617
 
package/next-env.d.ts CHANGED
@@ -1,6 +1,6 @@
1
1
  /// <reference types="next" />
2
2
  /// <reference types="next/image-types/global" />
3
- import "./.next/dev/types/routes.d.ts";
3
+ import "./.next/types/routes.d.ts";
4
4
 
5
5
  // NOTE: This file should not be edited
6
6
  // see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@aion0/forge",
3
- "version": "0.4.5",
3
+ "version": "0.4.7",
4
4
  "description": "Unified AI workflow platform — multi-model task orchestration, persistent sessions, web terminal, remote access",
5
5
  "type": "module",
6
6
  "scripts": {
@@ -35,6 +35,25 @@ function initSchema(db: Database.Database) {
35
35
  migrate('ALTER TABLE skills ADD COLUMN rating REAL DEFAULT 0');
36
36
  migrate('ALTER TABLE skills ADD COLUMN deleted_remotely INTEGER NOT NULL DEFAULT 0');
37
37
  migrate('ALTER TABLE project_pipelines ADD COLUMN last_run_at TEXT');
38
+ migrate('ALTER TABLE pipeline_runs ADD COLUMN dedup_key TEXT');
39
+ // Unique index for dedup (only applies when dedup_key is NOT NULL)
40
+ try { db.exec('CREATE UNIQUE INDEX IF NOT EXISTS idx_pipeline_runs_dedup ON pipeline_runs(project_path, workflow_name, dedup_key)'); } catch {}
41
+ // Migrate old issue_autofix_processed → pipeline_runs
42
+ try {
43
+ const old = db.prepare('SELECT * FROM issue_autofix_processed').all() as any[];
44
+ if (old.length > 0) {
45
+ const ins = db.prepare('INSERT OR IGNORE INTO pipeline_runs (id, project_path, workflow_name, pipeline_id, status, dedup_key, created_at) VALUES (?, ?, ?, ?, ?, ?, ?)');
46
+ for (const r of old) {
47
+ ins.run(
48
+ r.pipeline_id?.slice(0, 8) || ('mig-' + r.issue_number),
49
+ r.project_path, 'issue-fix-and-review', r.pipeline_id || '',
50
+ r.status === 'processing' ? 'running' : (r.status || 'done'),
51
+ `issue:${r.issue_number}`, r.created_at || new Date().toISOString()
52
+ );
53
+ }
54
+ console.log(`[db] Migrated ${old.length} issue_autofix_processed records to pipeline_runs`);
55
+ }
56
+ } catch {}
38
57
 
39
58
  db.exec(`
40
59
  CREATE TABLE IF NOT EXISTS sessions (