@jiggai/recipes 0.4.21 → 0.4.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/openclaw.plugin.json +1 -1
- package/package.json +1 -1
- package/recipes/default/business-team.md +33 -9
- package/recipes/default/customer-support-team.md +9 -6
- package/recipes/default/marketing-team.md +6 -0
- package/recipes/default/product-team.md +11 -8
- package/recipes/default/research-team.md +9 -6
- package/recipes/default/social-team.md +27 -24
- package/recipes/default/writing-team.md +9 -6
- package/src/handlers/cron.ts +28 -19
- package/src/handlers/team.ts +46 -0
- package/src/lib/recipe-frontmatter.ts +4 -0
- package/src/lib/workflows/workflow-approvals.ts +316 -0
- package/src/lib/workflows/workflow-node-executor.ts +520 -0
- package/src/lib/workflows/workflow-node-output-readers.ts +1 -1
- package/src/lib/workflows/workflow-queue.ts +56 -8
- package/src/lib/workflows/workflow-runner.ts +43 -1934
- package/src/lib/workflows/workflow-tick.ts +196 -0
- package/src/lib/workflows/workflow-types.ts +39 -0
- package/src/lib/workflows/workflow-utils.ts +330 -0
- package/src/lib/workflows/workflow-worker.ts +586 -0
- package/src/toolsInvoke.ts +1 -1
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
import fs from 'node:fs/promises';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import crypto from 'node:crypto';
|
|
4
|
+
import type { OpenClawPluginApi } from 'openclaw/plugin-sdk';
|
|
5
|
+
import { resolveTeamDir } from '../workspace';
|
|
6
|
+
import type { RunLog } from './workflow-types';
|
|
7
|
+
import { enqueueTask } from './workflow-queue';
|
|
8
|
+
import { readTextFile } from './workflow-runner-io';
|
|
9
|
+
import {
|
|
10
|
+
normalizeWorkflow,
|
|
11
|
+
fileExists,
|
|
12
|
+
appendRunLog, writeRunFile, loadRunFile,
|
|
13
|
+
pickNextRunnableNodeIndex,
|
|
14
|
+
} from './workflow-utils';
|
|
15
|
+
|
|
16
|
+
// eslint-disable-next-line complexity, max-lines-per-function
|
|
17
|
+
export async function runWorkflowRunnerTick(api: OpenClawPluginApi, opts: {
|
|
18
|
+
teamId: string;
|
|
19
|
+
concurrency?: number;
|
|
20
|
+
leaseSeconds?: number;
|
|
21
|
+
}) {
|
|
22
|
+
const teamId = String(opts.teamId);
|
|
23
|
+
const teamDir = resolveTeamDir(api, teamId);
|
|
24
|
+
const sharedContextDir = path.join(teamDir, 'shared-context');
|
|
25
|
+
const runsDir = path.join(sharedContextDir, 'workflow-runs');
|
|
26
|
+
const workflowsDir = path.join(sharedContextDir, 'workflows');
|
|
27
|
+
|
|
28
|
+
if (!(await fileExists(runsDir))) {
|
|
29
|
+
return { ok: true as const, teamId, claimed: 0, message: 'No workflow-runs directory present.' };
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
const concurrency = typeof opts.concurrency === 'number' && opts.concurrency > 0 ? Math.floor(opts.concurrency) : 1;
|
|
33
|
+
const leaseSeconds = typeof opts.leaseSeconds === 'number' && opts.leaseSeconds > 0 ? opts.leaseSeconds : 300;
|
|
34
|
+
const now = Date.now();
|
|
35
|
+
|
|
36
|
+
const entries = await fs.readdir(runsDir);
|
|
37
|
+
const candidates: Array<{ file: string; run: RunLog }> = [];
|
|
38
|
+
|
|
39
|
+
for (const e of entries) {
|
|
40
|
+
const abs = path.join(runsDir, e);
|
|
41
|
+
|
|
42
|
+
let runPath: string | null = null;
|
|
43
|
+
try {
|
|
44
|
+
const st = await fs.stat(abs);
|
|
45
|
+
if (st.isDirectory()) {
|
|
46
|
+
const p = path.join(abs, 'run.json');
|
|
47
|
+
if (await fileExists(p)) runPath = p;
|
|
48
|
+
}
|
|
49
|
+
} catch { // intentional: best-effort directory traversal
|
|
50
|
+
// ignore
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
if (!runPath) continue;
|
|
54
|
+
|
|
55
|
+
try {
|
|
56
|
+
const run = JSON.parse(await readTextFile(runPath)) as RunLog;
|
|
57
|
+
if (run.status !== 'queued') continue;
|
|
58
|
+
const exp = run.claimExpiresAt ? Date.parse(String(run.claimExpiresAt)) : 0;
|
|
59
|
+
const claimed = !!run.claimedBy && exp > now;
|
|
60
|
+
if (claimed) continue;
|
|
61
|
+
candidates.push({ file: runPath, run });
|
|
62
|
+
} catch { // intentional: skip malformed run.json
|
|
63
|
+
// ignore parse errors
|
|
64
|
+
}
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
if (!candidates.length) {
|
|
68
|
+
return { ok: true as const, teamId, claimed: 0, message: 'No queued runs available.' };
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
candidates.sort((a, b) => {
|
|
72
|
+
const pa = typeof a.run.priority === 'number' ? a.run.priority : 0;
|
|
73
|
+
const pb = typeof b.run.priority === 'number' ? b.run.priority : 0;
|
|
74
|
+
if (pa !== pb) return pb - pa;
|
|
75
|
+
return String(a.run.createdAt).localeCompare(String(b.run.createdAt));
|
|
76
|
+
});
|
|
77
|
+
|
|
78
|
+
const runnerIdBase = `workflow-runner:${process.pid}`;
|
|
79
|
+
|
|
80
|
+
async function tryClaim(runPath: string): Promise<RunLog | null> {
|
|
81
|
+
const raw = await readTextFile(runPath);
|
|
82
|
+
const cur = JSON.parse(raw) as RunLog;
|
|
83
|
+
if (cur.status !== 'queued') return null;
|
|
84
|
+
const exp = cur.claimExpiresAt ? Date.parse(String(cur.claimExpiresAt)) : 0;
|
|
85
|
+
const claimed = !!cur.claimedBy && exp > Date.now();
|
|
86
|
+
if (claimed) return null;
|
|
87
|
+
|
|
88
|
+
const claimExpiresAt = new Date(Date.now() + leaseSeconds * 1000).toISOString();
|
|
89
|
+
const claimedBy = `${runnerIdBase}:${crypto.randomBytes(3).toString('hex')}`;
|
|
90
|
+
|
|
91
|
+
const next: RunLog = {
|
|
92
|
+
...cur,
|
|
93
|
+
updatedAt: new Date().toISOString(),
|
|
94
|
+
status: 'running',
|
|
95
|
+
claimedBy,
|
|
96
|
+
claimExpiresAt,
|
|
97
|
+
events: [...(cur.events ?? []), { ts: new Date().toISOString(), type: 'run.claimed', claimedBy, claimExpiresAt }],
|
|
98
|
+
};
|
|
99
|
+
|
|
100
|
+
await fs.writeFile(runPath, JSON.stringify(next, null, 2), 'utf8');
|
|
101
|
+
return next;
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
const claimed: Array<{ file: string; run: RunLog }> = [];
|
|
105
|
+
for (const c of candidates) {
|
|
106
|
+
if (claimed.length >= concurrency) break;
|
|
107
|
+
const run = await tryClaim(c.file);
|
|
108
|
+
if (run) claimed.push({ file: c.file, run });
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
if (!claimed.length) {
|
|
112
|
+
return { ok: true as const, teamId, claimed: 0, message: 'No queued runs available (raced on claim).' };
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
async function execClaimed(runPath: string, run: RunLog) {
|
|
116
|
+
const workflowFile = String(run.workflow.file);
|
|
117
|
+
const workflowPath = path.join(workflowsDir, workflowFile);
|
|
118
|
+
const workflowRaw = await readTextFile(workflowPath);
|
|
119
|
+
const workflow = normalizeWorkflow(JSON.parse(workflowRaw));
|
|
120
|
+
|
|
121
|
+
try {
|
|
122
|
+
// Scheduler-only: do NOT execute nodes directly here.
|
|
123
|
+
// Instead, enqueue the next runnable node onto the assigned agent's pull queue.
|
|
124
|
+
// Graph-aware: if workflow.edges exist, choose the next runnable node by edge conditions.
|
|
125
|
+
|
|
126
|
+
let runCur = (await loadRunFile(teamDir, runsDir, run.runId)).run;
|
|
127
|
+
let idx = pickNextRunnableNodeIndex({ workflow, run: runCur });
|
|
128
|
+
|
|
129
|
+
// Auto-complete start/end nodes.
|
|
130
|
+
while (idx !== null) {
|
|
131
|
+
const n = workflow.nodes[idx]!;
|
|
132
|
+
const k = String(n.kind ?? '');
|
|
133
|
+
if (k !== 'start' && k !== 'end') break;
|
|
134
|
+
const ts = new Date().toISOString();
|
|
135
|
+
await appendRunLog(runPath, (cur) => ({
|
|
136
|
+
...cur,
|
|
137
|
+
nextNodeIndex: idx! + 1,
|
|
138
|
+
nodeStates: { ...(cur.nodeStates ?? {}), [n.id]: { status: 'success', ts } },
|
|
139
|
+
events: [...cur.events, { ts, type: 'node.completed', nodeId: n.id, kind: k, noop: true }],
|
|
140
|
+
nodeResults: [...(cur.nodeResults ?? []), { nodeId: n.id, kind: k, noop: true }],
|
|
141
|
+
}));
|
|
142
|
+
runCur = (await loadRunFile(teamDir, runsDir, run.runId)).run;
|
|
143
|
+
idx = pickNextRunnableNodeIndex({ workflow, run: runCur });
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
if (idx === null) {
|
|
147
|
+
await writeRunFile(runPath, (cur) => ({
|
|
148
|
+
...cur,
|
|
149
|
+
updatedAt: new Date().toISOString(),
|
|
150
|
+
status: 'completed',
|
|
151
|
+
claimedBy: null,
|
|
152
|
+
claimExpiresAt: null,
|
|
153
|
+
nextNodeIndex: cur.nextNodeIndex,
|
|
154
|
+
events: [...cur.events, { ts: new Date().toISOString(), type: 'run.completed' }],
|
|
155
|
+
}));
|
|
156
|
+
return { runId: run.runId, status: 'completed' };
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
const node = workflow.nodes[idx]!;
|
|
160
|
+
const assignedAgentId = String(node?.assignedTo?.agentId ?? '').trim();
|
|
161
|
+
if (!assignedAgentId) throw new Error(`Node ${node.id} missing assignedTo.agentId (required for pull-based execution)`);
|
|
162
|
+
|
|
163
|
+
await enqueueTask(teamDir, assignedAgentId, {
|
|
164
|
+
teamId,
|
|
165
|
+
runId: run.runId,
|
|
166
|
+
nodeId: node.id,
|
|
167
|
+
kind: 'execute_node',
|
|
168
|
+
});
|
|
169
|
+
|
|
170
|
+
await writeRunFile(runPath, (cur) => ({
|
|
171
|
+
...cur,
|
|
172
|
+
updatedAt: new Date().toISOString(),
|
|
173
|
+
status: 'waiting_workers',
|
|
174
|
+
claimedBy: null,
|
|
175
|
+
claimExpiresAt: null,
|
|
176
|
+
nextNodeIndex: idx,
|
|
177
|
+
events: [...cur.events, { ts: new Date().toISOString(), type: 'node.enqueued', nodeId: node.id, agentId: assignedAgentId }],
|
|
178
|
+
}));
|
|
179
|
+
|
|
180
|
+
return { runId: run.runId, status: 'waiting_workers' };
|
|
181
|
+
} catch (e) {
|
|
182
|
+
await writeRunFile(runPath, (cur) => ({
|
|
183
|
+
...cur,
|
|
184
|
+
updatedAt: new Date().toISOString(),
|
|
185
|
+
status: 'error',
|
|
186
|
+
claimedBy: null,
|
|
187
|
+
claimExpiresAt: null,
|
|
188
|
+
events: [...cur.events, { ts: new Date().toISOString(), type: 'run.error', message: (e as Error).message }],
|
|
189
|
+
}));
|
|
190
|
+
return { runId: run.runId, status: 'error', error: (e as Error).message };
|
|
191
|
+
}
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
const results = await Promise.all(claimed.map((c) => execClaimed(c.file, c.run)));
|
|
195
|
+
return { ok: true as const, teamId, claimed: claimed.length, results };
|
|
196
|
+
}
|
|
@@ -74,3 +74,42 @@ export type Workflow = {
|
|
|
74
74
|
edges?: WorkflowEdge[];
|
|
75
75
|
[k: string]: unknown;
|
|
76
76
|
};
|
|
77
|
+
|
|
78
|
+
/** Append-only run event: a timestamp plus a type tag, with event-specific payload fields. */
export type RunEvent = Record<string, unknown> & { ts: string; type: string };

/** Persisted state of a single workflow run (serialized to a run.json file per run). */
export type RunLog = {
  runId: string;
  createdAt: string;
  updatedAt?: string;
  teamId: string;
  // Reference to the workflow definition file; id/name may be null.
  workflow: { file: string; id: string | null; name: string | null };
  // Kanban ticket backing this run (file path, 4-digit number, current lane).
  ticket: { file: string; number: string; lane: WorkflowLane };
  // What started the run; `at` is an ISO timestamp when present.
  trigger: { kind: string; at?: string };
  status: string;
  // Scheduler/runner fields
  priority?: number;
  claimedBy?: string | null;
  claimExpiresAt?: string | null;
  nextNodeIndex?: number;
  // File-first workflow run state (graph-friendly)
  nodeStates?: Record<string, { status: 'success' | 'error' | 'waiting'; ts: string; message?: string }>;
  events: RunEvent[];
  nodeResults?: Array<Record<string, unknown>>;
};

/** Record of a human-approval request and its decision for one workflow node. */
export type ApprovalRecord = {
  runId: string;
  teamId: string;
  workflowFile: string;
  nodeId: string;
  bindingId: string;
  requestedAt: string;
  status: 'pending' | 'approved' | 'rejected';
  decidedAt?: string;
  // presumably file paths to the backing ticket and run log — confirm against workflow-approvals.ts
  ticket: string;
  runLog: string;
  note?: string;
  // Set after the run is resumed following a decision.
  resumedAt?: string;
  resumedStatus?: string;
  resumeError?: string;
};
|
|
@@ -0,0 +1,330 @@
|
|
|
1
|
+
import fs from 'node:fs/promises';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import type { Workflow, WorkflowEdge, WorkflowLane, WorkflowNode, RunLog } from './workflow-types';
|
|
4
|
+
import { sanitizeOutboundPostText } from './outbound-sanitize';
|
|
5
|
+
import { readTextFile } from './workflow-runner-io';
|
|
6
|
+
|
|
7
|
+
export function isRecord(v: unknown): v is Record<string, unknown> {
|
|
8
|
+
return !!v && typeof v == 'object' && !Array.isArray(v);
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
export function asRecord(v: unknown): Record<string, unknown> {
|
|
12
|
+
return isRecord(v) ? v : {};
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
export function asString(v: unknown, fallback = ''): string {
|
|
16
|
+
return typeof v === 'string' ? v : (v == null ? fallback : String(v));
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
export function asArray(v: unknown): unknown[] {
|
|
20
|
+
return Array.isArray(v) ? v : [];
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
export function normalizeWorkflow(raw: unknown): Workflow {
|
|
25
|
+
const w = asRecord(raw);
|
|
26
|
+
const id = asString(w['id']).trim();
|
|
27
|
+
if (!id) throw new Error('Workflow missing required field: id');
|
|
28
|
+
|
|
29
|
+
const meta = asRecord(w['meta']);
|
|
30
|
+
const approvalBindingId = asString(meta['approvalBindingId']).trim();
|
|
31
|
+
|
|
32
|
+
// Accept both canonical schema (node.kind/assignedTo/action/output) and ClawKitchen UI schema
|
|
33
|
+
// (node.type + node.config). Normalize into the canonical in-memory shape.
|
|
34
|
+
const nodes: WorkflowNode[] = asArray(w['nodes']).map((nRaw) => {
|
|
35
|
+
const n = asRecord(nRaw);
|
|
36
|
+
const config = asRecord(n['config']);
|
|
37
|
+
|
|
38
|
+
const kind = asString(n['kind'] ?? n['type']).trim();
|
|
39
|
+
|
|
40
|
+
const assignedToRec = asRecord(n['assignedTo']);
|
|
41
|
+
const agentId = asString(assignedToRec['agentId'] ?? config['agentId']).trim();
|
|
42
|
+
const assignedTo = agentId ? { agentId } : undefined;
|
|
43
|
+
|
|
44
|
+
const actionRaw = asRecord(n['action']);
|
|
45
|
+
const action = {
|
|
46
|
+
...actionRaw,
|
|
47
|
+
// LLM: allow either promptTemplatePath (preferred) or inline promptTemplate string
|
|
48
|
+
...(config['promptTemplate'] != null ? { promptTemplate: asString(config['promptTemplate']) } : {}),
|
|
49
|
+
...(config['promptTemplatePath'] != null ? { promptTemplatePath: asString(config['promptTemplatePath']) } : {}),
|
|
50
|
+
|
|
51
|
+
// Tool
|
|
52
|
+
...(config['tool'] != null ? { tool: asString(config['tool']) } : {}),
|
|
53
|
+
...(isRecord(config['args']) ? { args: config['args'] } : {}),
|
|
54
|
+
|
|
55
|
+
// Human approval
|
|
56
|
+
...(config['approvalBindingId'] != null ? { approvalBindingId: asString(config['approvalBindingId']) } : {}),
|
|
57
|
+
};
|
|
58
|
+
|
|
59
|
+
// Prefer explicit per-node approval binding, else fall back to workflow meta.approvalBindingId.
|
|
60
|
+
if (kind == 'human_approval' && !asString(action['approvalBindingId']).trim() && approvalBindingId) {
|
|
61
|
+
action['approvalBindingId'] = approvalBindingId;
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
return {
|
|
65
|
+
...n,
|
|
66
|
+
id: asString(n['id']).trim(),
|
|
67
|
+
kind,
|
|
68
|
+
assignedTo,
|
|
69
|
+
action,
|
|
70
|
+
// Keep config around for debugging/back-compat, but don't depend on it.
|
|
71
|
+
config,
|
|
72
|
+
} as WorkflowNode;
|
|
73
|
+
});
|
|
74
|
+
|
|
75
|
+
const edges: WorkflowEdge[] | undefined = Array.isArray(w['edges'])
|
|
76
|
+
? asArray(w['edges']).map((eRaw) => {
|
|
77
|
+
const e = asRecord(eRaw);
|
|
78
|
+
return {
|
|
79
|
+
...e,
|
|
80
|
+
from: asString(e['from']).trim(),
|
|
81
|
+
to: asString(e['to']).trim(),
|
|
82
|
+
on: (asString(e['on']).trim() || 'success') as WorkflowEdge['on'],
|
|
83
|
+
} as WorkflowEdge;
|
|
84
|
+
})
|
|
85
|
+
: undefined;
|
|
86
|
+
|
|
87
|
+
return { ...w, id, nodes, ...(edges ? { edges } : {}) } as Workflow;
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
export function isoCompact(ts = new Date()) {
|
|
91
|
+
// Runner runIds appear in filenames + URLs. Keep them conservative + URL-safe.
|
|
92
|
+
// - lowercase
|
|
93
|
+
// - no ':' or '.'
|
|
94
|
+
// - avoid 'T'/'Z' uppercase markers from ISO strings
|
|
95
|
+
return ts
|
|
96
|
+
.toISOString()
|
|
97
|
+
.toLowerCase()
|
|
98
|
+
.replace(/[:.]/g, '-');
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
export function assertLane(lane: string): asserts lane is WorkflowLane {
|
|
102
|
+
if (lane !== 'backlog' && lane !== 'in-progress' && lane !== 'testing' && lane !== 'done') {
|
|
103
|
+
throw new Error(`Invalid lane: ${lane}`);
|
|
104
|
+
}
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
export async function ensureDir(p: string) {
|
|
108
|
+
await fs.mkdir(p, { recursive: true });
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
export async function fileExists(p: string) {
|
|
112
|
+
try {
|
|
113
|
+
await fs.stat(p);
|
|
114
|
+
return true;
|
|
115
|
+
} catch { // intentional: best-effort file existence check
|
|
116
|
+
return false;
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
export async function listTicketNumbers(teamDir: string): Promise<number[]> {
|
|
121
|
+
const workDir = path.join(teamDir, 'work');
|
|
122
|
+
const lanes = ['backlog', 'in-progress', 'testing', 'done'];
|
|
123
|
+
const nums: number[] = [];
|
|
124
|
+
|
|
125
|
+
for (const lane of lanes) {
|
|
126
|
+
const laneDir = path.join(workDir, lane);
|
|
127
|
+
if (!(await fileExists(laneDir))) continue;
|
|
128
|
+
const files = await fs.readdir(laneDir);
|
|
129
|
+
for (const f of files) {
|
|
130
|
+
const m = f.match(/^(\d{4})-/);
|
|
131
|
+
if (m) nums.push(Number(m[1]));
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
return nums;
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
export async function nextTicketNumber(teamDir: string) {
|
|
138
|
+
const nums = await listTicketNumbers(teamDir);
|
|
139
|
+
const max = nums.length ? Math.max(...nums) : 0;
|
|
140
|
+
const next = max + 1;
|
|
141
|
+
return String(next).padStart(4, '0');
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
export function laneToStatus(lane: WorkflowLane) {
|
|
145
|
+
if (lane === 'backlog') return 'queued';
|
|
146
|
+
if (lane === 'in-progress') return 'in-progress';
|
|
147
|
+
if (lane === 'testing') return 'testing';
|
|
148
|
+
return 'done';
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
export function templateReplace(input: string, vars: Record<string, string>) {
|
|
152
|
+
let out = String(input ?? '');
|
|
153
|
+
for (const [k, v] of Object.entries(vars)) {
|
|
154
|
+
out = out.replaceAll(`{{${k}}}`, v);
|
|
155
|
+
}
|
|
156
|
+
return out;
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
/**
 * Back-compat alias for sanitizeOutboundPostText.
 *
 * Older workflow nodes reference this name (their text mentions 'draft only');
 * the canonical sanitizer it delegates to also strips other internal-only
 * disclaimer lines. New code should call sanitizeOutboundPostText directly.
 */
export function sanitizeDraftOnlyText(text: string): string {
  // Back-compat: older workflow nodes mention 'draft only'.
  // New canonical sanitizer also strips other internal-only disclaimer lines.
  return sanitizeOutboundPostText(text);
}
|
|
164
|
+
|
|
165
|
+
export async function moveRunTicket(opts: {
|
|
166
|
+
teamDir: string;
|
|
167
|
+
ticketPath: string;
|
|
168
|
+
toLane: WorkflowLane;
|
|
169
|
+
}): Promise<{ ticketPath: string }> {
|
|
170
|
+
const { teamDir, ticketPath, toLane } = opts;
|
|
171
|
+
const workDir = path.join(teamDir, 'work');
|
|
172
|
+
const toDir = path.join(workDir, toLane);
|
|
173
|
+
await ensureDir(toDir);
|
|
174
|
+
const file = path.basename(ticketPath);
|
|
175
|
+
const dest = path.join(toDir, file);
|
|
176
|
+
|
|
177
|
+
if (path.resolve(ticketPath) !== path.resolve(dest)) {
|
|
178
|
+
await fs.rename(ticketPath, dest);
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
// Best-effort: update Status: line.
|
|
182
|
+
try {
|
|
183
|
+
const md = await readTextFile(dest);
|
|
184
|
+
const next = md.replace(/^Status: .*$/m, `Status: ${laneToStatus(toLane)}`);
|
|
185
|
+
if (next !== md) await fs.writeFile(dest, next, 'utf8');
|
|
186
|
+
} catch {
|
|
187
|
+
// intentional: best-effort status update
|
|
188
|
+
}
|
|
189
|
+
|
|
190
|
+
return { ticketPath: dest };
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
export function loadNodeStatesFromRun(run: RunLog): Record<string, { status: 'success' | 'error' | 'waiting'; ts: string }> {
|
|
194
|
+
const out: Record<string, { status: 'success' | 'error' | 'waiting'; ts: string }> = {};
|
|
195
|
+
|
|
196
|
+
const cur = run.nodeStates;
|
|
197
|
+
if (cur) {
|
|
198
|
+
for (const [nodeId, st] of Object.entries(cur)) {
|
|
199
|
+
if (st?.status === 'success' || st?.status === 'error' || st?.status === 'waiting') {
|
|
200
|
+
out[String(nodeId)] = { status: st.status, ts: st.ts };
|
|
201
|
+
}
|
|
202
|
+
}
|
|
203
|
+
}
|
|
204
|
+
|
|
205
|
+
for (const evRaw of Array.isArray(run.events) ? run.events : []) {
|
|
206
|
+
const ev = asRecord(evRaw);
|
|
207
|
+
const nodeId = asString(ev['nodeId']).trim();
|
|
208
|
+
if (!nodeId) continue;
|
|
209
|
+
const ts = asString(ev['ts']) || new Date().toISOString();
|
|
210
|
+
const type = asString(ev['type']).trim();
|
|
211
|
+
|
|
212
|
+
if (type === 'node.completed') out[nodeId] = { status: 'success', ts };
|
|
213
|
+
if (type === 'node.error') out[nodeId] = { status: 'error', ts };
|
|
214
|
+
if (type === 'node.awaiting_approval') out[nodeId] = { status: 'waiting', ts };
|
|
215
|
+
if (type === 'node.approved') out[nodeId] = { status: 'success', ts };
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
return out;
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
/**
 * Choose the index of the next node to execute for a run, or null when nothing
 * is runnable (which the scheduler treats as "run completed").
 *
 * Two modes:
 *  - No edges: sequential scan from run.nextNodeIndex, skipping nodes already
 *    in a terminal/waiting state.
 *  - With edges: graph mode — a node is runnable when it has no recorded state
 *    and either its explicit input.from dependencies all succeeded, or at
 *    least one incoming edge condition is satisfied (no incoming edges = root).
 */
export function pickNextRunnableNodeIndex(opts: { workflow: Workflow; run: RunLog }): number | null {
  const { workflow, run } = opts;
  const nodes = Array.isArray(workflow.nodes) ? workflow.nodes : [];
  if (!nodes.length) return null;

  const hasEdges = Array.isArray(workflow.edges) && workflow.edges.length > 0;
  if (!hasEdges) {
    // Sequential fallback for legacy/no-edge workflows.
    const start = typeof run.nextNodeIndex === 'number' ? run.nextNodeIndex : 0;
    for (let i = Math.max(0, start); i < nodes.length; i++) {
      const n = nodes[i]!;
      const id = asString(n.id).trim();
      if (!id) continue;
      // Skip nodes that already finished (or are waiting on approval).
      const st = (run.nodeStates ?? {})[id]?.status;
      if (st === 'success' || st === 'error' || st === 'waiting') continue;
      return i;
    }
    return null;
  }

  const nodeStates = loadNodeStatesFromRun(run);

  // Revision semantics: if the run is in needs_revision, we intentionally allow
  // re-execution of nodes from nextNodeIndex onward even if they previously
  // completed in an earlier attempt. Events are append-only, so earlier
  // node.completed events would otherwise make the graph think everything is
  // already satisfied and incorrectly mark the run completed.
  if (run.status === 'needs_revision' && typeof run.nextNodeIndex === 'number') {
    for (let i = Math.max(0, run.nextNodeIndex); i < nodes.length; i++) {
      const id = asString(nodes[i]?.id).trim();
      if (id) delete nodeStates[id];
    }
  }

  // Index edges by target node so readiness checks are O(incoming edges).
  const incomingEdgesByNodeId = new Map<string, WorkflowEdge[]>();
  const edges = Array.isArray(workflow.edges) ? workflow.edges : [];
  for (const e of edges) {
    const to = asString(e.to).trim();
    if (!to) continue;
    const list = incomingEdgesByNodeId.get(to) ?? [];
    list.push(e);
    incomingEdgesByNodeId.set(to, list);
  }

  // An edge is satisfied when its source node's state matches the edge's
  // `on` condition ('success' by default; 'always' accepts success or error).
  function edgeSatisfied(e: WorkflowEdge): boolean {
    const fromId = asString(e.from).trim();
    const from = nodeStates[fromId]?.status;
    const on = (e.on ?? 'success') as string;
    if (!from) return false;
    if (on === 'always') return from === 'success' || from === 'error';
    if (on === 'error') return from === 'error';
    return from === 'success';
  }

  function nodeReady(node: WorkflowNode): boolean {
    const nodeId = asString(node.id).trim();
    if (!nodeId) return false;

    // Already ran (or is waiting) — not runnable again.
    const st = nodeStates[nodeId]?.status;
    if (st === 'success' || st === 'error' || st === 'waiting') return false;

    // Explicit data dependencies take precedence over edges: ALL must succeed.
    const inputFrom = node.input?.from;
    if (Array.isArray(inputFrom) && inputFrom.length) {
      return inputFrom.every((dep) => nodeStates[asString(dep)]?.status === 'success');
    }

    // Edge-based readiness: no incoming edges means a root node (always ready);
    // otherwise ANY satisfied incoming edge unblocks the node.
    const incoming = incomingEdgesByNodeId.get(nodeId) ?? [];
    if (!incoming.length) return true;
    return incoming.some(edgeSatisfied);
  }

  // First ready node in declaration order wins.
  for (let i = 0; i < nodes.length; i++) {
    if (nodeReady(nodes[i]!)) return i;
  }
  return null;
}
|
|
297
|
+
|
|
298
|
+
export async function appendRunLog(runLogPath: string, fn: (cur: RunLog) => RunLog) {
|
|
299
|
+
const raw = await readTextFile(runLogPath);
|
|
300
|
+
const cur = JSON.parse(raw) as RunLog;
|
|
301
|
+
const next0 = fn(cur);
|
|
302
|
+
const next = {
|
|
303
|
+
...next0,
|
|
304
|
+
updatedAt: new Date().toISOString(),
|
|
305
|
+
};
|
|
306
|
+
await fs.writeFile(runLogPath, JSON.stringify(next, null, 2), 'utf8');
|
|
307
|
+
}
|
|
308
|
+
|
|
309
|
+
export function nodeLabel(n: WorkflowNode) {
|
|
310
|
+
return `${n.kind}:${n.id}${n.name ? ` (${n.name})` : ''}`;
|
|
311
|
+
}
|
|
312
|
+
|
|
313
|
+
export function runFilePathFor(runsDir: string, runId: string) {
|
|
314
|
+
// File-first: one directory per run.
|
|
315
|
+
return path.join(runsDir, runId, 'run.json');
|
|
316
|
+
}
|
|
317
|
+
|
|
318
|
+
export async function loadRunFile(teamDir: string, runsDir: string, runId: string): Promise<{ path: string; run: RunLog }> {
|
|
319
|
+
const runPath = runFilePathFor(runsDir, runId);
|
|
320
|
+
if (!(await fileExists(runPath))) throw new Error(`Run file not found: ${path.relative(teamDir, runPath)}`);
|
|
321
|
+
const raw = await readTextFile(runPath);
|
|
322
|
+
return { path: runPath, run: JSON.parse(raw) as RunLog };
|
|
323
|
+
}
|
|
324
|
+
|
|
325
|
+
export async function writeRunFile(runPath: string, fn: (cur: RunLog) => RunLog) {
|
|
326
|
+
const raw = await readTextFile(runPath);
|
|
327
|
+
const cur = JSON.parse(raw) as RunLog;
|
|
328
|
+
const next = fn(cur);
|
|
329
|
+
await fs.writeFile(runPath, JSON.stringify(next, null, 2), 'utf8');
|
|
330
|
+
}
|