@jiggai/recipes 0.4.37 → 0.4.39
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/openclaw.plugin.json +1 -1
- package/package.json +1 -1
- package/src/lib/workflows/media-drivers/kling-video.driver.ts +28 -1
- package/src/lib/workflows/media-drivers/utils.ts +2 -6
- package/src/lib/workflows/workflow-node-executor.ts +6 -0
- package/src/lib/workflows/workflow-runner.ts +2 -0
- package/src/lib/workflows/workflow-types.ts +2 -1
- package/src/lib/workflows/workflow-worker.ts +536 -1
package/openclaw.plugin.json
CHANGED
package/package.json
CHANGED
|
@@ -3,6 +3,29 @@ import * as fs from 'fs';
|
|
|
3
3
|
import { MediaDriver, MediaDriverInvokeOpts, MediaDriverResult, DurationConstraints, parseDuration } from './types';
|
|
4
4
|
import { findSkillDir, runScript, parseMediaOutput } from './utils';
|
|
5
5
|
|
|
6
|
+
/**
|
|
7
|
+
* Map aspect ratios to Kling's supported values: 16:9, 9:16, 1:1
|
|
8
|
+
*/
|
|
9
|
+
function mapToKlingAspectRatio(ratio: string): string {
|
|
10
|
+
const normalized = ratio.toLowerCase().trim();
|
|
11
|
+
|
|
12
|
+
// Direct matches
|
|
13
|
+
if (normalized === '16:9' || normalized === '9:16' || normalized === '1:1') {
|
|
14
|
+
return normalized;
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
// Map common ratios to closest Kling equivalent
|
|
18
|
+
const mappings: Record<string, string> = {
|
|
19
|
+
'4:3': '1:1', // 4:3 (1.33) → 1:1 (1.00) closest square-ish
|
|
20
|
+
'3:4': '9:16', // 3:4 (0.75) → 9:16 (0.56) closest vertical
|
|
21
|
+
'21:9': '16:9', // 21:9 (2.33) → 16:9 (1.78) closest widescreen
|
|
22
|
+
'2:1': '16:9', // 2:1 (2.00) → 16:9 (1.78) closest widescreen
|
|
23
|
+
'1:2': '9:16', // 1:2 (0.50) → 9:16 (0.56) closest vertical
|
|
24
|
+
};
|
|
25
|
+
|
|
26
|
+
return mappings[normalized] || '16:9'; // default fallback
|
|
27
|
+
}
|
|
28
|
+
|
|
6
29
|
/**
|
|
7
30
|
* Kling AI video driver — uses official `klingai` ClawHub skill.
|
|
8
31
|
*
|
|
@@ -39,6 +62,10 @@ export class KlingVideo implements MediaDriver {
|
|
|
39
62
|
const rawDuration = Math.max(3, Math.min(15, Number(parseDuration(config))));
|
|
40
63
|
const duration = String(rawDuration);
|
|
41
64
|
|
|
65
|
+
// Kling only supports 16:9, 9:16, 1:1 — map other ratios to closest match
|
|
66
|
+
const rawAspectRatio = String(config?.aspect_ratio ?? config?.size ?? '16:9');
|
|
67
|
+
const aspectRatio = mapToKlingAspectRatio(rawAspectRatio);
|
|
68
|
+
|
|
42
69
|
const skillDir = await findSkillDir(this.slug);
|
|
43
70
|
if (!skillDir) {
|
|
44
71
|
throw new Error(
|
|
@@ -58,7 +85,7 @@ export class KlingVideo implements MediaDriver {
|
|
|
58
85
|
'--prompt', prompt,
|
|
59
86
|
'--output_dir', outputDir,
|
|
60
87
|
'--duration', duration,
|
|
61
|
-
'--aspect_ratio',
|
|
88
|
+
'--aspect_ratio', aspectRatio,
|
|
62
89
|
'--mode', 'pro',
|
|
63
90
|
],
|
|
64
91
|
env: {
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import * as fs from 'fs/promises';
|
|
2
2
|
import * as path from 'path';
|
|
3
|
-
import {
|
|
3
|
+
import { execFileSync } from 'child_process';
|
|
4
4
|
|
|
5
5
|
/**
|
|
6
6
|
* Find a skill directory by searching common skill roots
|
|
@@ -96,13 +96,9 @@ export interface RunScriptOpts {
|
|
|
96
96
|
|
|
97
97
|
export function runScript(opts: RunScriptOpts): string {
|
|
98
98
|
const { runner, script, args = [], stdin, env, cwd, timeout } = opts;
|
|
99
|
-
|
|
100
|
-
const command = args.length > 0
|
|
101
|
-
? `${runner} ${JSON.stringify(script)} ${args.map(arg => JSON.stringify(arg)).join(' ')}`
|
|
102
|
-
: `${runner} ${JSON.stringify(script)}`;
|
|
103
99
|
|
|
104
100
|
try {
|
|
105
|
-
return
|
|
101
|
+
return execFileSync(runner, [script, ...args], {
|
|
106
102
|
cwd,
|
|
107
103
|
timeout,
|
|
108
104
|
encoding: 'utf8',
|
|
@@ -499,6 +499,12 @@ export async function executeWorkflowNodes(opts: {
|
|
|
499
499
|
}
|
|
500
500
|
}
|
|
501
501
|
|
|
502
|
+
if (kind === 'handoff') {
|
|
503
|
+
// Handoff nodes are supported in the pull-based worker (workflow-worker.ts).
|
|
504
|
+
// The synchronous executor doesn't support them yet — use `enqueue` + worker-tick instead.
|
|
505
|
+
throw new Error(`Node ${nodeLabel(node)}: handoff nodes require pull-based execution (use 'openclaw recipes workflows enqueue' + worker-tick)`);
|
|
506
|
+
}
|
|
507
|
+
|
|
502
508
|
throw new Error(`Unsupported node kind: ${node.kind} (${nodeLabel(node)})`);
|
|
503
509
|
}
|
|
504
510
|
|
|
@@ -27,6 +27,7 @@ export async function enqueueWorkflowRun(api: OpenClawPluginApi, opts: {
|
|
|
27
27
|
teamId: string;
|
|
28
28
|
workflowFile: string; // filename under shared-context/workflows/
|
|
29
29
|
trigger?: { kind: string; at?: string };
|
|
30
|
+
triggerInput?: Record<string, unknown>;
|
|
30
31
|
}) {
|
|
31
32
|
const teamId = String(opts.teamId);
|
|
32
33
|
const teamDir = resolveTeamDir(api, teamId);
|
|
@@ -94,6 +95,7 @@ export async function enqueueWorkflowRun(api: OpenClawPluginApi, opts: {
|
|
|
94
95
|
workflow: { file: opts.workflowFile, id: workflow.id ?? null, name: workflow.name ?? null },
|
|
95
96
|
ticket: { file: path.relative(teamDir, ticketPath), number: ticketNum, lane: initialLane },
|
|
96
97
|
trigger,
|
|
98
|
+
...(opts.triggerInput && Object.keys(opts.triggerInput).length > 0 ? { triggerInput: opts.triggerInput } : {}),
|
|
97
99
|
status: 'queued',
|
|
98
100
|
priority: 0,
|
|
99
101
|
claimedBy: null,
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
export type WorkflowLane = 'backlog' | 'in-progress' | 'testing' | 'done';
|
|
2
2
|
|
|
3
|
-
export type WorkflowNodeKind = 'llm' | 'human_approval' | 'writeback' | 'tool' | 'start' | 'end' | string;
|
|
3
|
+
export type WorkflowNodeKind = 'llm' | 'human_approval' | 'writeback' | 'tool' | 'handoff' | 'start' | 'end' | string;
|
|
4
4
|
|
|
5
5
|
export type WorkflowEdgeOn = 'success' | 'error' | 'always';
|
|
6
6
|
|
|
@@ -85,6 +85,7 @@ export type RunLog = {
|
|
|
85
85
|
workflow: { file: string; id: string | null; name: string | null };
|
|
86
86
|
ticket: { file: string; number: string; lane: WorkflowLane };
|
|
87
87
|
trigger: { kind: string; at?: string };
|
|
88
|
+
triggerInput?: Record<string, unknown>;
|
|
88
89
|
status: string;
|
|
89
90
|
// Scheduler/runner fields
|
|
90
91
|
priority?: number;
|
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import fs from 'node:fs/promises';
|
|
2
2
|
import path from 'node:path';
|
|
3
|
+
import crypto from 'node:crypto';
|
|
3
4
|
import type { OpenClawPluginApi } from 'openclaw/plugin-sdk';
|
|
4
5
|
import type { ToolTextResult } from '../../toolsInvoke';
|
|
5
6
|
import { toolsInvoke } from '../../toolsInvoke';
|
|
@@ -7,7 +8,7 @@ import { resolveTeamDir } from '../workspace';
|
|
|
7
8
|
import { getDriver } from './media-drivers/registry';
|
|
8
9
|
import { GenericDriver } from './media-drivers/generic.driver';
|
|
9
10
|
import { loadConfigEnv } from './media-drivers/utils';
|
|
10
|
-
import type { WorkflowLane } from './workflow-types';
|
|
11
|
+
import type { WorkflowLane, WorkflowNode, RunLog } from './workflow-types';
|
|
11
12
|
import { dequeueNextTask, enqueueTask, releaseTaskClaim, compactQueue } from './workflow-queue';
|
|
12
13
|
import { loadPriorLlmInput, loadProposedPostTextFromPriorNode } from './workflow-node-output-readers';
|
|
13
14
|
import { readTextFile } from './workflow-runner-io';
|
|
@@ -16,6 +17,7 @@ import {
|
|
|
16
17
|
asRecord, asString, isRecord,
|
|
17
18
|
normalizeWorkflow,
|
|
18
19
|
assertLane, ensureDir, fileExists,
|
|
20
|
+
isoCompact, nextTicketNumber, laneToStatus,
|
|
19
21
|
moveRunTicket, appendRunLog, writeRunFile, loadRunFile,
|
|
20
22
|
runFilePathFor, nodeLabel,
|
|
21
23
|
loadNodeStatesFromRun, pickNextRunnableNodeIndex,
|
|
@@ -129,6 +131,18 @@ async function buildTemplateVars(
|
|
|
129
131
|
} as Record<string, string>;
|
|
130
132
|
|
|
131
133
|
const { run: runSnap } = await loadRunFile(teamDir, runsDir, runId);
|
|
134
|
+
|
|
135
|
+
// Expose triggerInput as template variables (for handoff-injected data)
|
|
136
|
+
if (runSnap.triggerInput && typeof runSnap.triggerInput === 'object') {
|
|
137
|
+
for (const [key, value] of Object.entries(runSnap.triggerInput)) {
|
|
138
|
+
if (typeof value === 'string') {
|
|
139
|
+
vars[`trigger.${key}`] = value;
|
|
140
|
+
} else if (value !== null && value !== undefined) {
|
|
141
|
+
vars[`trigger.${key}`] = JSON.stringify(value);
|
|
142
|
+
}
|
|
143
|
+
}
|
|
144
|
+
}
|
|
145
|
+
|
|
132
146
|
for (const nr of (runSnap.nodeResults ?? [])) {
|
|
133
147
|
const nid = String((nr as Record<string, unknown>).nodeId ?? '');
|
|
134
148
|
const nrOutPath = String((nr as Record<string, unknown>).nodeOutputPath ?? '');
|
|
@@ -171,6 +185,330 @@ async function buildTemplateVars(
|
|
|
171
185
|
return vars;
|
|
172
186
|
}
|
|
173
187
|
|
|
188
|
+
/**
|
|
189
|
+
* Enqueue a workflow run from a handoff node.
|
|
190
|
+
* This is a lightweight version of enqueueWorkflowRun that lives in the worker
|
|
191
|
+
* to avoid circular imports (workflow-runner re-exports workflow-worker).
|
|
192
|
+
*/
|
|
193
|
+
async function enqueueWorkflowRunForHandoff(api: OpenClawPluginApi, opts: {
|
|
194
|
+
teamId: string;
|
|
195
|
+
workflowFile: string;
|
|
196
|
+
trigger?: { kind: string; at?: string };
|
|
197
|
+
triggerInput?: Record<string, unknown>;
|
|
198
|
+
}): Promise<{ runId: string; runLogPath: string }> {
|
|
199
|
+
const teamId = String(opts.teamId);
|
|
200
|
+
const teamDir = resolveTeamDir(api, teamId);
|
|
201
|
+
const sharedContextDir = path.join(teamDir, 'shared-context');
|
|
202
|
+
const workflowsDir = path.join(sharedContextDir, 'workflows');
|
|
203
|
+
const runsDir = path.join(sharedContextDir, 'workflow-runs');
|
|
204
|
+
|
|
205
|
+
const workflowPath = path.join(workflowsDir, opts.workflowFile);
|
|
206
|
+
const raw = await readTextFile(workflowPath);
|
|
207
|
+
const workflow = normalizeWorkflow(JSON.parse(raw));
|
|
208
|
+
|
|
209
|
+
if (!workflow.nodes?.length) throw new Error('Handoff target workflow has no nodes');
|
|
210
|
+
|
|
211
|
+
const firstLaneRaw = String(
|
|
212
|
+
workflow.nodes.find(n => n?.config && typeof n.config === 'object' && 'lane' in n.config)?.config?.lane ?? 'backlog'
|
|
213
|
+
);
|
|
214
|
+
assertLane(firstLaneRaw);
|
|
215
|
+
const initialLane: WorkflowLane = firstLaneRaw;
|
|
216
|
+
|
|
217
|
+
const runId = `${isoCompact()}-${crypto.randomBytes(4).toString('hex')}`;
|
|
218
|
+
await ensureDir(runsDir);
|
|
219
|
+
|
|
220
|
+
const runDir = path.join(runsDir, runId);
|
|
221
|
+
await ensureDir(runDir);
|
|
222
|
+
await Promise.all([
|
|
223
|
+
ensureDir(path.join(runDir, 'node-outputs')),
|
|
224
|
+
ensureDir(path.join(runDir, 'artifacts')),
|
|
225
|
+
ensureDir(path.join(runDir, 'approvals')),
|
|
226
|
+
]);
|
|
227
|
+
|
|
228
|
+
const runLogPath = path.join(runDir, 'run.json');
|
|
229
|
+
|
|
230
|
+
const ticketNum = await nextTicketNumber(teamDir);
|
|
231
|
+
const slug = `workflow-run-${(workflow.id ?? path.basename(opts.workflowFile, path.extname(opts.workflowFile))).replace(/[^a-z0-9-]+/gi, '-').toLowerCase()}`;
|
|
232
|
+
const ticketFile = `${ticketNum}-${slug}.md`;
|
|
233
|
+
|
|
234
|
+
const laneDir = path.join(teamDir, 'work', initialLane);
|
|
235
|
+
await ensureDir(laneDir);
|
|
236
|
+
const ticketPath = path.join(laneDir, ticketFile);
|
|
237
|
+
|
|
238
|
+
const trigger = opts.trigger ?? { kind: 'handoff' };
|
|
239
|
+
const createdAt = new Date().toISOString();
|
|
240
|
+
const handoffMeta = opts.triggerInput?._handoff as Record<string, unknown> | undefined;
|
|
241
|
+
|
|
242
|
+
const md = [
|
|
243
|
+
`# ${ticketNum} — Workflow run: ${workflow.name ?? workflow.id ?? opts.workflowFile}\n\n`,
|
|
244
|
+
`Owner: lead`,
|
|
245
|
+
`Status: ${laneToStatus(initialLane)}`,
|
|
246
|
+
`\n## Run`,
|
|
247
|
+
`- workflow: ${path.relative(teamDir, workflowPath)}`,
|
|
248
|
+
`- run dir: ${path.relative(teamDir, runDir)}`,
|
|
249
|
+
`- run file: ${path.relative(teamDir, runLogPath)}`,
|
|
250
|
+
`- trigger: ${trigger.kind}${trigger.at ? ` @ ${trigger.at}` : ''}`,
|
|
251
|
+
`- runId: ${runId}`,
|
|
252
|
+
handoffMeta ? `- handoff from: team=${handoffMeta.sourceTeamId}, workflow=${handoffMeta.sourceWorkflowName}, run=${handoffMeta.sourceRunId}` : '',
|
|
253
|
+
`\n## Notes`,
|
|
254
|
+
`- Created by: handoff node`,
|
|
255
|
+
``,
|
|
256
|
+
].filter(Boolean).join('\n');
|
|
257
|
+
|
|
258
|
+
const initialLog: RunLog = {
|
|
259
|
+
runId,
|
|
260
|
+
createdAt,
|
|
261
|
+
updatedAt: createdAt,
|
|
262
|
+
teamId,
|
|
263
|
+
workflow: { file: opts.workflowFile, id: workflow.id ?? null, name: workflow.name ?? null },
|
|
264
|
+
ticket: { file: path.relative(teamDir, ticketPath), number: ticketNum, lane: initialLane },
|
|
265
|
+
trigger,
|
|
266
|
+
...(opts.triggerInput && Object.keys(opts.triggerInput).length > 0 ? { triggerInput: opts.triggerInput } : {}),
|
|
267
|
+
status: 'queued',
|
|
268
|
+
priority: 0,
|
|
269
|
+
claimedBy: null,
|
|
270
|
+
claimExpiresAt: null,
|
|
271
|
+
nextNodeIndex: 0,
|
|
272
|
+
events: [{ ts: createdAt, type: 'run.enqueued', lane: initialLane, trigger: trigger.kind }],
|
|
273
|
+
nodeResults: [],
|
|
274
|
+
};
|
|
275
|
+
|
|
276
|
+
await Promise.all([
|
|
277
|
+
fs.writeFile(ticketPath, md, 'utf8'),
|
|
278
|
+
fs.writeFile(runLogPath, JSON.stringify(initialLog, null, 2), 'utf8'),
|
|
279
|
+
]);
|
|
280
|
+
|
|
281
|
+
return { runId, runLogPath };
|
|
282
|
+
}
|
|
283
|
+
|
|
284
|
+
/**
 * Check for waiting_handoff runs and resolve them if the target run has completed.
 * Called at the start of each worker tick before processing the normal queue.
 *
 * Scans every run directory under `shared-context/workflow-runs` for a
 * `handoff-waits/` folder containing JSON wait markers. For each marker it:
 *  - fails the node if the marker's timeout has elapsed;
 *  - fails the node if the target run can no longer be loaded;
 *  - on target completion, writes the handoff node's output file, marks the
 *    node successful, and advances the run (auto-completing start/end nodes
 *    and enqueueing the next real node to an agent queue);
 *  - on target failure, fails the handoff node;
 *  - otherwise leaves the marker in place to be re-checked next tick.
 * Resolved markers are deleted; all filesystem errors are best-effort ignored.
 *
 * @param api     Plugin API used to resolve team directories (including the target team's).
 * @param teamId  Team whose runs are being scanned.
 * @param teamDir Absolute directory of that team.
 * @returns One entry per resolved marker: { runId, nodeId, status } where
 *          status is 'timeout' | 'error' | 'completed'.
 */
async function checkWaitingHandoffs(api: OpenClawPluginApi, teamId: string, teamDir: string): Promise<Array<{ runId: string; nodeId: string; status: string }>> {
  const results: Array<{ runId: string; nodeId: string; status: string }> = [];
  const runsDir = path.join(teamDir, 'shared-context', 'workflow-runs');

  // Scan all active runs for handoff-waits directories
  let runDirs: string[] = [];
  try {
    const entries = await fs.readdir(runsDir, { withFileTypes: true });
    runDirs = entries.filter(e => e.isDirectory()).map(e => e.name);
  } catch { return results; } // runs dir missing — nothing to do

  for (const runDirName of runDirs) {
    const runDir = path.join(runsDir, runDirName);
    const handoffWaitDir = path.join(runDir, 'handoff-waits');

    let waitFiles: string[] = [];
    try {
      const entries = await fs.readdir(handoffWaitDir);
      waitFiles = entries.filter(f => f.endsWith('.json'));
    } catch { continue; } // No handoff-waits dir

    if (waitFiles.length === 0) continue;

    // Load current run to verify it's still waiting_handoff
    const runPath = path.join(runDir, 'run.json');
    let run: RunLog;
    try {
      const raw = await fs.readFile(runPath, 'utf8');
      run = JSON.parse(raw) as RunLog;
    } catch { continue; } // unreadable run.json — skip this run

    if (run.status !== 'waiting_handoff') {
      // Clean up stale wait markers
      for (const wf of waitFiles) {
        try { await fs.unlink(path.join(handoffWaitDir, wf)); } catch { /* ignore */ }
      }
      continue;
    }

    for (const waitFile of waitFiles) {
      const waitPath = path.join(handoffWaitDir, waitFile);
      // Shape written by the handoff node when it entered wait mode.
      let marker: {
        nodeId: string; nodeIdx: number;
        targetTeamId: string; targetWorkflowId: string; targetWorkflowFile: string;
        targetRunId: string; startedAt: string; timeoutAt: string;
        nodeOutputRel: string;
      };
      try {
        marker = JSON.parse(await fs.readFile(waitPath, 'utf8'));
      } catch { continue; } // unparseable marker — skip it

      // Check timeout
      const now = Date.now();
      if (new Date(marker.timeoutAt).getTime() <= now) {
        // Timeout — fail the node
        const failTs = new Date().toISOString();
        await appendRunLog(runPath, (cur) => ({
          ...cur,
          status: 'error',
          nodeStates: { ...(cur.nodeStates ?? {}), [marker.nodeId]: { status: 'error', ts: failTs, message: 'Handoff wait timed out' } },
          events: [...cur.events, {
            ts: failTs, type: 'node.error', nodeId: marker.nodeId, kind: 'handoff',
            error: `Handoff wait timed out after ${Math.round((now - new Date(marker.startedAt).getTime()) / 1000)}s`,
          }],
        }));
        try { await fs.unlink(waitPath); } catch { /* ignore */ }
        results.push({ runId: run.runId, nodeId: marker.nodeId, status: 'timeout' });
        continue;
      }

      // Check target run status
      const targetTeamDir = resolveTeamDir(api, marker.targetTeamId);
      const targetRunsDir = path.join(targetTeamDir, 'shared-context', 'workflow-runs');
      let targetRun: RunLog;
      try {
        const loaded = await loadRunFile(targetTeamDir, targetRunsDir, marker.targetRunId);
        targetRun = loaded.run;
      } catch {
        // Target run not found — may have been cleaned up; fail
        const failTs = new Date().toISOString();
        await appendRunLog(runPath, (cur) => ({
          ...cur,
          status: 'error',
          nodeStates: { ...(cur.nodeStates ?? {}), [marker.nodeId]: { status: 'error', ts: failTs, message: 'Target run not found' } },
          events: [...cur.events, {
            ts: failTs, type: 'node.error', nodeId: marker.nodeId, kind: 'handoff',
            error: `Target run ${marker.targetRunId} not found in team ${marker.targetTeamId}`,
          }],
        }));
        try { await fs.unlink(waitPath); } catch { /* ignore */ }
        results.push({ runId: run.runId, nodeId: marker.nodeId, status: 'error' });
        continue;
      }

      if (targetRun.status === 'completed' || targetRun.status === 'done') {
        // Target completed — resolve handoff node with target's output
        // (indexed by nodeId so the source workflow's templates can address it).
        const targetOutput: Record<string, unknown> = {};
        if (Array.isArray(targetRun.nodeResults)) {
          for (const nr of targetRun.nodeResults) {
            if (nr.nodeId && typeof nr.nodeId === 'string') {
              targetOutput[nr.nodeId as string] = nr;
            }
          }
        }

        const nodeOutputAbs = path.resolve(runDir, marker.nodeOutputRel);
        await ensureDir(path.dirname(nodeOutputAbs));
        const outputObj = {
          runId: run.runId,
          teamId,
          nodeId: marker.nodeId,
          kind: 'handoff',
          completedAt: new Date().toISOString(),
          text: JSON.stringify({
            targetTeamId: marker.targetTeamId,
            targetWorkflowId: marker.targetWorkflowId,
            targetRunId: marker.targetRunId,
            status: 'completed',
            targetOutput,
          }, null, 2),
        };
        await fs.writeFile(nodeOutputAbs, JSON.stringify(outputObj, null, 2) + '\n', 'utf8');

        const completedTs = new Date().toISOString();

        // Load workflow to find next node
        const workflowsDir = path.join(teamDir, 'shared-context', 'workflows');
        let workflow;
        try {
          const wfRaw = await fs.readFile(path.join(workflowsDir, run.workflow.file), 'utf8');
          workflow = normalizeWorkflow(JSON.parse(wfRaw));
        } catch { workflow = null; } // workflow file gone — mark node done but can't advance

        await appendRunLog(runPath, (cur) => ({
          ...cur,
          status: 'waiting_workers',
          nextNodeIndex: marker.nodeIdx + 1,
          nodeStates: { ...(cur.nodeStates ?? {}), [marker.nodeId]: { status: 'success', ts: completedTs } },
          events: [...cur.events, {
            ts: completedTs, type: 'node.completed', nodeId: marker.nodeId, kind: 'handoff',
            targetTeamId: marker.targetTeamId, targetWorkflowId: marker.targetWorkflowId,
            targetRunId: marker.targetRunId, mode: 'wait-for-completion',
            nodeOutputPath: marker.nodeOutputRel,
          }],
        }));

        // Enqueue next node if workflow is available
        if (workflow) {
          // Re-load the run so the pick below sees the state written just above.
          const updatedRun = (await loadRunFile(teamDir, runsDir, run.runId)).run;
          const nextIdx = pickNextRunnableNodeIndex({ workflow, run: updatedRun });

          if (nextIdx !== null && nextIdx >= 0 && nextIdx < workflow.nodes.length) {
            const nextNode = workflow.nodes[nextIdx];
            if (nextNode.type === 'end' || nextNode.type === 'start') {
              // Auto-complete start/end
              const autoTs = new Date().toISOString();
              await appendRunLog(runPath, (cur) => ({
                ...cur,
                nextNodeIndex: nextIdx + 1,
                nodeStates: { ...(cur.nodeStates ?? {}), [nextNode.id]: { status: 'success', ts: autoTs } },
                events: [...cur.events, { ts: autoTs, type: 'node.completed', nodeId: nextNode.id, kind: nextNode.type }],
              }));
              // Check if run is done
              const afterAutoRun = (await loadRunFile(teamDir, runsDir, run.runId)).run;
              const afterNext = pickNextRunnableNodeIndex({ workflow, run: afterAutoRun });
              if (afterNext === null) {
                const doneTs = new Date().toISOString();
                await appendRunLog(runPath, (cur) => ({
                  ...cur,
                  status: 'completed',
                  events: [...cur.events, { ts: doneTs, type: 'run.completed' }],
                }));
              }
            } else {
              // Enqueue next real node to the appropriate agent's queue
              // (explicit assignment wins; otherwise reuse whoever claimed the run).
              const assignedAgent = String(nextNode.assignedTo ?? '').trim();
              const targetAgent = assignedAgent || run.claimedBy || '';
              if (targetAgent) {
                await enqueueTask(teamDir, targetAgent, {
                  teamId,
                  runId: run.runId,
                  nodeId: nextNode.id,
                  kind: 'execute_node',
                });
              }
            }
          } else if (nextIdx === null) {
            // All nodes done
            const doneTs = new Date().toISOString();
            await appendRunLog(runPath, (cur) => ({
              ...cur,
              status: 'completed',
              events: [...cur.events, { ts: doneTs, type: 'run.completed' }],
            }));
          }
        }

        try { await fs.unlink(waitPath); } catch { /* ignore */ }
        results.push({ runId: run.runId, nodeId: marker.nodeId, status: 'completed' });
      } else if (targetRun.status === 'error' || targetRun.status === 'failed') {
        // Target failed — fail the handoff node too
        const failTs = new Date().toISOString();
        const lastError = targetRun.events?.filter(e => e.type === 'node.error').pop();
        await appendRunLog(runPath, (cur) => ({
          ...cur,
          status: 'error',
          nodeStates: { ...(cur.nodeStates ?? {}), [marker.nodeId]: {
            status: 'error', ts: failTs,
            message: `Target workflow failed: ${lastError?.error ?? 'unknown error'}`,
          } },
          events: [...cur.events, {
            ts: failTs, type: 'node.error', nodeId: marker.nodeId, kind: 'handoff',
            error: `Target run ${marker.targetRunId} failed`,
          }],
        }));
        try { await fs.unlink(waitPath); } catch { /* ignore */ }
        results.push({ runId: run.runId, nodeId: marker.nodeId, status: 'error' });
      }
      // else: still running — do nothing, check again next tick
    }
  }
  return results;
}
|
|
511
|
+
|
|
174
512
|
// eslint-disable-next-line complexity, max-lines-per-function
|
|
175
513
|
export async function runWorkflowWorkerTick(api: OpenClawPluginApi, opts: {
|
|
176
514
|
teamId: string;
|
|
@@ -193,6 +531,14 @@ export async function runWorkflowWorkerTick(api: OpenClawPluginApi, opts: {
|
|
|
193
531
|
|
|
194
532
|
const results: Array<{ taskId: string; runId: string; nodeId: string; status: string }> = [];
|
|
195
533
|
|
|
534
|
+
// Check for waiting_handoff runs before processing normal queue
|
|
535
|
+
try {
|
|
536
|
+
const handoffResults = await checkWaitingHandoffs(api, teamId, teamDir);
|
|
537
|
+
for (const hr of handoffResults) {
|
|
538
|
+
results.push({ taskId: '', runId: hr.runId, nodeId: hr.nodeId, status: `handoff:${hr.status}` });
|
|
539
|
+
}
|
|
540
|
+
} catch { /* handoff check is best-effort */ }
|
|
541
|
+
|
|
196
542
|
// Default lock TTL (used when we don't know the node config yet).
|
|
197
543
|
// This must be comfortably larger than typical media generation durations.
|
|
198
544
|
const DEFAULT_LOCK_TTL_MS = 30 * 60 * 1000;
|
|
@@ -1198,6 +1544,190 @@ export async function runWorkflowWorkerTick(api: OpenClawPluginApi, opts: {
|
|
|
1198
1544
|
events: [...cur.events, { ts: completedTs, type: 'node.completed', nodeId: node.id, kind: node.kind, nodeOutputPath: path.relative(teamDir, nodeOutputAbs) }],
|
|
1199
1545
|
nodeResults: [...(cur.nodeResults ?? []), { nodeId: node.id, kind: node.kind, mediaType, agentId: agentIdMedia || agentId, nodeOutputPath: path.relative(teamDir, nodeOutputAbs), bytes: new TextEncoder().encode(text).byteLength }],
|
|
1200
1546
|
}));
|
|
1547
|
+
} else if (kind === 'handoff') {
|
|
1548
|
+
// ── Handoff node: trigger a run on another workflow (optionally on a different team) ──
|
|
1549
|
+
const config = asRecord((node as unknown as Record<string, unknown>)['config']);
|
|
1550
|
+
const action = asRecord(node.action);
|
|
1551
|
+
|
|
1552
|
+
const targetTeamId = asString(config['targetTeamId'] ?? action['targetTeamId']).trim() || teamId;
|
|
1553
|
+
const targetWorkflowId = asString(config['targetWorkflowId'] ?? action['targetWorkflowId']).trim();
|
|
1554
|
+
if (!targetWorkflowId) throw new Error(`Node ${nodeLabel(node)} missing config.targetWorkflowId`);
|
|
1555
|
+
|
|
1556
|
+
// Resolve variable mapping: each key is the target's trigger input key, each value is a {{template}} expression
|
|
1557
|
+
const variableMapping = asRecord(config['variableMapping'] ?? action['variableMapping']);
|
|
1558
|
+
|
|
1559
|
+
// Build template vars from prior node outputs
|
|
1560
|
+
const vars = await buildTemplateVars(teamDir, runsDir, task.runId, workflowFile, workflow);
|
|
1561
|
+
vars['node.id'] = node.id;
|
|
1562
|
+
|
|
1563
|
+
// Resolve mapped variables
|
|
1564
|
+
const triggerInput: Record<string, unknown> = {
|
|
1565
|
+
_handoff: {
|
|
1566
|
+
sourceTeamId: teamId,
|
|
1567
|
+
sourceWorkflowId: String(workflow.id ?? ''),
|
|
1568
|
+
sourceWorkflowName: String(workflow.name ?? workflow.id ?? workflowFile),
|
|
1569
|
+
sourceRunId: task.runId,
|
|
1570
|
+
sourceNodeId: node.id,
|
|
1571
|
+
},
|
|
1572
|
+
};
|
|
1573
|
+
for (const [targetKey, templateExpr] of Object.entries(variableMapping)) {
|
|
1574
|
+
if (typeof templateExpr === 'string') {
|
|
1575
|
+
triggerInput[targetKey] = templateReplace(templateExpr, vars);
|
|
1576
|
+
}
|
|
1577
|
+
}
|
|
1578
|
+
|
|
1579
|
+
// Find the target workflow file
|
|
1580
|
+
const targetTeamDir = resolveTeamDir(api, targetTeamId);
|
|
1581
|
+
const targetWorkflowsDir = path.join(targetTeamDir, 'shared-context', 'workflows');
|
|
1582
|
+
let targetWorkflowFile = '';
|
|
1583
|
+
|
|
1584
|
+
// Try exact filename match first, then search by workflow id
|
|
1585
|
+
const candidateFiles = [
|
|
1586
|
+
`${targetWorkflowId}.json`,
|
|
1587
|
+
`${targetWorkflowId}`,
|
|
1588
|
+
];
|
|
1589
|
+
for (const candidate of candidateFiles) {
|
|
1590
|
+
const candidatePath = path.join(targetWorkflowsDir, candidate);
|
|
1591
|
+
if (await fileExists(candidatePath)) {
|
|
1592
|
+
targetWorkflowFile = candidate;
|
|
1593
|
+
break;
|
|
1594
|
+
}
|
|
1595
|
+
}
|
|
1596
|
+
|
|
1597
|
+
// If not found by filename, scan workflows for matching id
|
|
1598
|
+
if (!targetWorkflowFile) {
|
|
1599
|
+
try {
|
|
1600
|
+
const wfFiles = await fs.readdir(targetWorkflowsDir);
|
|
1601
|
+
for (const wf of wfFiles) {
|
|
1602
|
+
if (!wf.endsWith('.json')) continue;
|
|
1603
|
+
try {
|
|
1604
|
+
const wfPath = path.join(targetWorkflowsDir, wf);
|
|
1605
|
+
const wfRaw = await fs.readFile(wfPath, 'utf8');
|
|
1606
|
+
const wfParsed = JSON.parse(wfRaw);
|
|
1607
|
+
if (String(wfParsed.id ?? '') === targetWorkflowId || String(wfParsed.name ?? '') === targetWorkflowId) {
|
|
1608
|
+
targetWorkflowFile = wf;
|
|
1609
|
+
break;
|
|
1610
|
+
}
|
|
1611
|
+
} catch { /* skip unparseable workflows */ }
|
|
1612
|
+
}
|
|
1613
|
+
} catch { /* target workflows dir may not exist */ }
|
|
1614
|
+
}
|
|
1615
|
+
|
|
1616
|
+
if (!targetWorkflowFile) {
|
|
1617
|
+
throw new Error(`Handoff target workflow "${targetWorkflowId}" not found in team "${targetTeamId}"`);
|
|
1618
|
+
}
|
|
1619
|
+
|
|
1620
|
+
// Enqueue the target workflow run with triggerInput
|
|
1621
|
+
const enqueueResult = await enqueueWorkflowRunForHandoff(api, {
|
|
1622
|
+
teamId: targetTeamId,
|
|
1623
|
+
workflowFile: targetWorkflowFile,
|
|
1624
|
+
trigger: { kind: 'handoff', at: new Date().toISOString() },
|
|
1625
|
+
triggerInput,
|
|
1626
|
+
});
|
|
1627
|
+
|
|
1628
|
+
const handoffMode = asString(config['mode'] ?? 'fire-and-forget').trim() || 'fire-and-forget';
|
|
1629
|
+
|
|
1630
|
+
// Save initial handoff output
|
|
1631
|
+
const defaultNodeOutputRel = path.join('node-outputs', `${String(nodeIdx).padStart(3, '0')}-${node.id}.json`);
|
|
1632
|
+
const nodeOutputRel = String(node?.output?.path ?? '').trim() || defaultNodeOutputRel;
|
|
1633
|
+
const nodeOutputAbs = path.resolve(runDir, nodeOutputRel);
|
|
1634
|
+
await ensureDir(path.dirname(nodeOutputAbs));
|
|
1635
|
+
|
|
1636
|
+
if (handoffMode === 'wait-for-completion') {
|
|
1637
|
+
// Phase 2: Wait for target run to complete
|
|
1638
|
+
const waitTimeoutMs = typeof config['waitTimeoutMs'] === 'number' ? config['waitTimeoutMs'] as number : 5 * 60 * 1000;
|
|
1639
|
+
|
|
1640
|
+
const outputObj = {
|
|
1641
|
+
runId: task.runId,
|
|
1642
|
+
teamId,
|
|
1643
|
+
nodeId: node.id,
|
|
1644
|
+
kind: 'handoff',
|
|
1645
|
+
text: JSON.stringify({
|
|
1646
|
+
targetTeamId,
|
|
1647
|
+
targetWorkflowId,
|
|
1648
|
+
targetWorkflowFile,
|
|
1649
|
+
targetRunId: enqueueResult.runId,
|
|
1650
|
+
status: 'waiting',
|
|
1651
|
+
triggerInputKeys: Object.keys(triggerInput),
|
|
1652
|
+
}, null, 2),
|
|
1653
|
+
};
|
|
1654
|
+
await fs.writeFile(nodeOutputAbs, JSON.stringify(outputObj, null, 2) + '\n', 'utf8');
|
|
1655
|
+
|
|
1656
|
+
// Write handoff wait marker so the polling loop can find it
|
|
1657
|
+
const handoffWaitDir = path.join(runDir, 'handoff-waits');
|
|
1658
|
+
await ensureDir(handoffWaitDir);
|
|
1659
|
+
const waitMarker = {
|
|
1660
|
+
nodeId: node.id,
|
|
1661
|
+
nodeIdx,
|
|
1662
|
+
targetTeamId,
|
|
1663
|
+
targetWorkflowId,
|
|
1664
|
+
targetWorkflowFile,
|
|
1665
|
+
targetRunId: enqueueResult.runId,
|
|
1666
|
+
startedAt: new Date().toISOString(),
|
|
1667
|
+
timeoutAt: new Date(Date.now() + waitTimeoutMs).toISOString(),
|
|
1668
|
+
nodeOutputRel,
|
|
1669
|
+
};
|
|
1670
|
+
await fs.writeFile(
|
|
1671
|
+
path.join(handoffWaitDir, `${node.id}.json`),
|
|
1672
|
+
JSON.stringify(waitMarker, null, 2) + '\n',
|
|
1673
|
+
'utf8',
|
|
1674
|
+
);
|
|
1675
|
+
|
|
1676
|
+
const waitingTs = new Date().toISOString();
|
|
1677
|
+
await appendRunLog(runPath, (cur) => ({
|
|
1678
|
+
...cur,
|
|
1679
|
+
status: 'waiting_handoff',
|
|
1680
|
+
nodeStates: { ...(cur.nodeStates ?? {}), [node.id]: { status: 'waiting', ts: waitingTs } },
|
|
1681
|
+
events: [...cur.events, {
|
|
1682
|
+
ts: waitingTs, type: 'node.waiting_handoff', nodeId: node.id, kind: 'handoff',
|
|
1683
|
+
targetTeamId, targetWorkflowId, targetRunId: enqueueResult.runId,
|
|
1684
|
+
mode: 'wait-for-completion', timeoutAt: waitMarker.timeoutAt,
|
|
1685
|
+
}],
|
|
1686
|
+
nodeResults: [...(cur.nodeResults ?? []), {
|
|
1687
|
+
nodeId: node.id, kind: 'handoff',
|
|
1688
|
+
targetTeamId, targetWorkflowId, targetRunId: enqueueResult.runId,
|
|
1689
|
+
nodeOutputPath: path.relative(teamDir, nodeOutputAbs),
|
|
1690
|
+
}],
|
|
1691
|
+
}));
|
|
1692
|
+
|
|
1693
|
+
results.push({ taskId: task.id, runId: task.runId, nodeId: task.nodeId, status: 'waiting_handoff' });
|
|
1694
|
+
continue; // Skip the normal next-node enqueue logic
|
|
1695
|
+
} else {
|
|
1696
|
+
// Fire-and-forget: complete immediately
|
|
1697
|
+
const outputObj = {
|
|
1698
|
+
runId: task.runId,
|
|
1699
|
+
teamId,
|
|
1700
|
+
nodeId: node.id,
|
|
1701
|
+
kind: 'handoff',
|
|
1702
|
+
completedAt: new Date().toISOString(),
|
|
1703
|
+
text: JSON.stringify({
|
|
1704
|
+
targetTeamId,
|
|
1705
|
+
targetWorkflowId,
|
|
1706
|
+
targetWorkflowFile,
|
|
1707
|
+
targetRunId: enqueueResult.runId,
|
|
1708
|
+
status: 'enqueued',
|
|
1709
|
+
triggerInputKeys: Object.keys(triggerInput),
|
|
1710
|
+
}, null, 2),
|
|
1711
|
+
};
|
|
1712
|
+
await fs.writeFile(nodeOutputAbs, JSON.stringify(outputObj, null, 2) + '\n', 'utf8');
|
|
1713
|
+
|
|
1714
|
+
const completedTs = new Date().toISOString();
|
|
1715
|
+
await appendRunLog(runPath, (cur) => ({
|
|
1716
|
+
...cur,
|
|
1717
|
+
nextNodeIndex: nodeIdx + 1,
|
|
1718
|
+
nodeStates: { ...(cur.nodeStates ?? {}), [node.id]: { status: 'success', ts: completedTs } },
|
|
1719
|
+
events: [...cur.events, {
|
|
1720
|
+
ts: completedTs, type: 'node.completed', nodeId: node.id, kind: 'handoff',
|
|
1721
|
+
targetTeamId, targetWorkflowId, targetRunId: enqueueResult.runId,
|
|
1722
|
+
nodeOutputPath: path.relative(teamDir, nodeOutputAbs),
|
|
1723
|
+
}],
|
|
1724
|
+
nodeResults: [...(cur.nodeResults ?? []), {
|
|
1725
|
+
nodeId: node.id, kind: 'handoff',
|
|
1726
|
+
targetTeamId, targetWorkflowId, targetRunId: enqueueResult.runId,
|
|
1727
|
+
nodeOutputPath: path.relative(teamDir, nodeOutputAbs),
|
|
1728
|
+
}],
|
|
1729
|
+
}));
|
|
1730
|
+
}
|
|
1201
1731
|
} else {
|
|
1202
1732
|
throw new Error(`Worker does not yet support node kind: ${kind}`);
|
|
1203
1733
|
}
|
|
@@ -1212,6 +1742,11 @@ export async function runWorkflowWorkerTick(api: OpenClawPluginApi, opts: {
|
|
|
1212
1742
|
continue;
|
|
1213
1743
|
}
|
|
1214
1744
|
|
|
1745
|
+
if (updated.status === 'waiting_handoff') {
|
|
1746
|
+
results.push({ taskId: task.id, runId: task.runId, nodeId: task.nodeId, status: 'waiting_handoff' });
|
|
1747
|
+
continue;
|
|
1748
|
+
}
|
|
1749
|
+
|
|
1215
1750
|
let enqueueIdx = pickNextRunnableNodeIndex({ workflow, run: updated });
|
|
1216
1751
|
|
|
1217
1752
|
// Auto-complete start/end nodes.
|