ofiere-openclaw-plugin 3.5.5 → 4.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +2 -2
- package/src/prompt.ts +20 -8
- package/src/tools.ts +394 -61
package/package.json
CHANGED
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "ofiere-openclaw-plugin",
|
|
3
|
-
"version": "
|
|
3
|
+
"version": "4.1.0",
|
|
4
4
|
"type": "module",
|
|
5
|
-
"description": "OpenClaw plugin for Ofiere PM - 10 meta-tools covering tasks, agents, projects, scheduling, knowledge, workflows, notifications, memory, prompts, and constellation agent architecture",
|
|
5
|
+
"description": "OpenClaw plugin for Ofiere PM - 10 meta-tools with 13-action workflow mastery covering tasks, agents, projects, scheduling, knowledge, workflows, notifications, memory, prompts, and constellation agent architecture",
|
|
6
6
|
"keywords": ["openclaw", "ofiere", "project-management", "agents", "plugin"],
|
|
7
7
|
"homepage": "https://github.com/gilanggemar/Ofiere",
|
|
8
8
|
"repository": {
|
package/src/prompt.ts
CHANGED
|
@@ -40,17 +40,24 @@ const TOOL_DOCS: Record<string, string> = {
|
|
|
40
40
|
- create: Add knowledge to the library. Requires: file_name. Optional: content, source, author, credibility_tier
|
|
41
41
|
- update/delete: By document ID`,
|
|
42
42
|
|
|
43
|
-
OFIERE_WORKFLOW_OPS: `- **OFIERE_WORKFLOW_OPS** —
|
|
43
|
+
OFIERE_WORKFLOW_OPS: `- **OFIERE_WORKFLOW_OPS** — Full workflow automation control (13 actions)
|
|
44
44
|
- list: All workflows, filter by status (draft, active, paused, archived)
|
|
45
|
-
- get: Full workflow details
|
|
46
|
-
- create: New workflow with name + nodes and edges
|
|
47
|
-
|
|
48
|
-
- Each node: { type, data: { label, ...type-specific fields } }. IDs/positions auto-generated
|
|
49
|
-
- Edges: { source, target }. A manual_trigger is auto-prepended if no trigger exists
|
|
50
|
-
- update: Modify workflow (name, description, status, nodes, edges)
|
|
45
|
+
- get: Full workflow details with all node IDs, types, data, edges — ALWAYS call this before surgical edits
|
|
46
|
+
- create: New workflow with name + nodes[] and edges[]. A manual_trigger is auto-prepended if missing
|
|
47
|
+
- update: Replace entire workflow graph (name, description, status, nodes, edges)
|
|
51
48
|
- delete: Remove workflow and all run history
|
|
52
49
|
- list_runs: Recent execution history for a workflow
|
|
53
|
-
- trigger: Start a workflow run
|
|
50
|
+
- trigger: Start a workflow run
|
|
51
|
+
- **add_nodes**: Add new nodes to an existing workflow. Required: workflow_id, nodes[]. Optional: edges[] to connect them
|
|
52
|
+
- **update_node**: Edit a specific node's data fields (e.g. change task instructions, agentId, template). Required: workflow_id, node_id, data. Only specified fields are changed — others are preserved
|
|
53
|
+
- **delete_nodes**: Remove specific nodes and all their connected edges. Required: workflow_id, node_ids[]
|
|
54
|
+
- **add_edges**: Add new connections between existing nodes. Required: workflow_id, edges[]. Each edge: { source, target, sourceHandle?, targetHandle? }
|
|
55
|
+
- **delete_edges**: Remove specific edges by ID. Required: workflow_id, edge_ids[]
|
|
56
|
+
- **insert_node_between**: Insert a new node between two connected nodes (auto-rewires edges). Required: workflow_id, source_node_id, target_node_id, node. Use this to add steps in the middle of a flow
|
|
57
|
+
- Node types: manual_trigger, webhook_trigger, agent_step, formatter_step, http_request, task_call, variable_set, condition, human_approval, delay, loop, convergence, output, checkpoint, note
|
|
58
|
+
- Key fields: agent_step(agentId, task, responseMode, timeoutSec), formatter_step(template, formatMode), condition(expression, varCheck, operator, varMatch), human_approval(instructions), variable_set(variableName, variableValue, operation)
|
|
59
|
+
- Edge handles: condition edges use sourceHandle "condition-true"/"condition-false". Loop edges use "loop_body"/"done"
|
|
60
|
+
- Variables: Use {{prev.nodeId.outputText}} for prior outputs, {{variables.key}} for stored variables`,
|
|
54
61
|
|
|
55
62
|
OFIERE_NOTIFY_OPS: `- **OFIERE_NOTIFY_OPS** — Notifications (action: "list", "mark_read", "mark_all_read", "delete")
|
|
56
63
|
- list: Recent notifications. unread_only=true for unread only
|
|
@@ -130,6 +137,11 @@ ${toolDocs}
|
|
|
130
137
|
- When creating or editing an agent's architecture, ALWAYS use OFIERE_CONSTELLATION_OPS action:"read_blueprint" first to understand the required structure.
|
|
131
138
|
- When creating a new agent, use OFIERE_CONSTELLATION_OPS action:"create_agent" with all available structured params. The agent will be auto-registered in OpenClaw.
|
|
132
139
|
- When deleting an agent, ALWAYS ask the user for explicit confirmation BEFORE calling OFIERE_CONSTELLATION_OPS action:"delete_agent" with confirm: true. Show them what will be deleted. This action is IRREVERSIBLE.
|
|
140
|
+
- WORKFLOW MASTERY: When modifying existing workflows, ALWAYS call "get" first to see all node IDs and the current graph structure.
|
|
141
|
+
- To add a step in the middle of a flow, use "insert_node_between" with the source and target node IDs. Do NOT rebuild the entire graph.
|
|
142
|
+
- To change a node's configuration (e.g. update agent instructions, change template text), use "update_node" with just the fields that changed.
|
|
143
|
+
- When creating workflows with condition or loop nodes, specify sourceHandle on edges: "condition-true"/"condition-false" for conditions, "loop_body"/"done" for loops.
|
|
144
|
+
- Fill node data fields with actual content — include real task instructions, templates, and variable references. Do NOT leave fields empty unless intentionally blank.
|
|
133
145
|
</ofiere-pm>`;
|
|
134
146
|
}
|
|
135
147
|
|
package/src/tools.ts
CHANGED
|
@@ -1141,6 +1141,17 @@ function registerKnowledgeOps(
|
|
|
1141
1141
|
});
|
|
1142
1142
|
}
|
|
1143
1143
|
|
|
1144
|
+
// ─── Workflow Mutation Serialization Queue ────────────────────────────────────
|
|
1145
|
+
// Prevents parallel mutations on the same workflow from racing (last-write-wins).
|
|
1146
|
+
// Each workflow gets a sequential promise chain — mutations queue behind previous ones.
|
|
1147
|
+
const _wfLockChain = new Map<string, Promise<any>>();
|
|
1148
|
+
function sequentialWorkflowOp<T>(wfId: string, fn: () => Promise<T>): Promise<T> {
|
|
1149
|
+
const prev = _wfLockChain.get(wfId) || Promise.resolve();
|
|
1150
|
+
const next = prev.catch(() => {}).then(fn);
|
|
1151
|
+
_wfLockChain.set(wfId, next.catch(() => {}));
|
|
1152
|
+
return next;
|
|
1153
|
+
}
|
|
1154
|
+
|
|
1144
1155
|
// ═══════════════════════════════════════════════════════════════════════════════
|
|
1145
1156
|
// META-TOOL 6: OFIERE_WORKFLOW_OPS — Workflow Management & Execution
|
|
1146
1157
|
// ═══════════════════════════════════════════════════════════════════════════════
|
|
@@ -1155,46 +1166,60 @@ function registerWorkflowOps(
|
|
|
1155
1166
|
label: "Ofiere Workflow Operations",
|
|
1156
1167
|
description:
|
|
1157
1168
|
`Manage, build, and trigger automated workflows in the Ofiere dashboard.\n\n` +
|
|
1158
|
-
`
|
|
1169
|
+
`ACTIONS:\n` +
|
|
1159
1170
|
`- "list": List all workflows. Optional: status\n` +
|
|
1160
|
-
`- "get": Get workflow details. Required: id\n` +
|
|
1161
|
-
`- "create": Create a workflow WITH nodes and edges. Required: name. Optional: description, nodes, edges,
|
|
1162
|
-
`- "update": Update
|
|
1171
|
+
`- "get": Get full workflow details including all nodes/edges. Required: id\n` +
|
|
1172
|
+
`- "create": Create a workflow WITH nodes and edges. Required: name. Optional: description, nodes, edges, status\n` +
|
|
1173
|
+
`- "update": Update workflow metadata or replace entire graph. Required: id. Optional: name, description, status, nodes, edges\n` +
|
|
1163
1174
|
`- "delete": Delete a workflow and its run history. Required: id\n` +
|
|
1164
1175
|
`- "list_runs": List recent runs. Required: workflow_id. Optional: limit\n` +
|
|
1165
|
-
`- "trigger": Start a workflow run. Required: workflow_id\n
|
|
1166
|
-
|
|
1176
|
+
`- "trigger": Start a workflow run. Required: workflow_id\n` +
|
|
1177
|
+
`- "add_nodes": Add new nodes to an existing workflow. Required: workflow_id, nodes[]\n` +
|
|
1178
|
+
`- "update_node": Edit a specific node's data fields without replacing the graph. Required: workflow_id, node_id, data\n` +
|
|
1179
|
+
`- "delete_nodes": Remove specific nodes and their edges. Required: workflow_id, node_ids[]\n` +
|
|
1180
|
+
`- "add_edges": Add new connections between nodes. Required: workflow_id, edges[]\n` +
|
|
1181
|
+
`- "delete_edges": Remove specific edges. Required: workflow_id, edge_ids[]\n` +
|
|
1182
|
+
`- "insert_node_between": Insert a new node between two connected nodes (auto-rewires edges). Required: workflow_id, source_node_id, target_node_id, node\n\n` +
|
|
1183
|
+
`NODE TYPES & CONFIGURABLE FIELDS:\n` +
|
|
1167
1184
|
` TRIGGERS (start of workflow — pick one):\n` +
|
|
1168
|
-
`
|
|
1169
|
-
`
|
|
1170
|
-
` - "schedule_trigger": Runs on cron schedule. data: { label, cron: "0 9 * * 1-5" }\n` +
|
|
1185
|
+
` "manual_trigger": { label }\n` +
|
|
1186
|
+
` "webhook_trigger": { label, triggerName?, webhookPayload? }\n\n` +
|
|
1171
1187
|
` STEPS (the work):\n` +
|
|
1172
|
-
`
|
|
1173
|
-
`
|
|
1174
|
-
`
|
|
1175
|
-
`
|
|
1176
|
-
`
|
|
1188
|
+
` "agent_step": { label, agentId: string, agentName?: string, task: string (prompt/instructions), responseMode: "text"|"json", timeoutSec: number(120) }\n` +
|
|
1189
|
+
` "formatter_step": { label, template: string (use {{prev.nodeId.outputText}}), formatMode: "template"|"uppercase"|"lowercase"|"extract_json"|"remove_whitespace", outputKey?: string }\n` +
|
|
1190
|
+
` "http_request": { label, method: "GET"|"POST"|"PUT"|"PATCH"|"DELETE", url: string, body?: string (JSON), headers?: object, timeoutSec: number(30) }\n` +
|
|
1191
|
+
` "task_call": { label, agentId: string, taskId: string, taskTitle?: string, agentName?: string, systemPromptOverride?: string }\n` +
|
|
1192
|
+
` "variable_set": { label, variableName: string, variableValue: string, operation: "set"|"append"|"prepend" }\n\n` +
|
|
1177
1193
|
` CONTROL FLOW:\n` +
|
|
1178
|
-
`
|
|
1179
|
-
`
|
|
1180
|
-
`
|
|
1181
|
-
`
|
|
1182
|
-
`
|
|
1194
|
+
` "condition": { label, expression: string (JS), varCheck?: string, operator?: "=="|"!="|"contains"|"not_contains"|"starts_with"|"ends_with"|"is_empty"|"not_empty", varMatch?: string }\n` +
|
|
1195
|
+
` → EDGES: use sourceHandle "condition-true" and "condition-false" for the two branches\n` +
|
|
1196
|
+
` "human_approval": { label, instructions: string, checkpointId?: string (for rejection cycle) }\n` +
|
|
1197
|
+
` "delay": { label, delaySec: number(1-3600) }\n` +
|
|
1198
|
+
` "loop": { label, loopType: "count"|"for_each", maxIterations: number, iterateOver?: string, checkpointId?: string }\n` +
|
|
1199
|
+
` → EDGES: use sourceHandle "loop_body" for the loop body path and "done" for the exit path\n` +
|
|
1200
|
+
` "convergence": { label, mergeStrategy: "wait_all"|"first_arrives" }\n\n` +
|
|
1183
1201
|
` END:\n` +
|
|
1184
|
-
`
|
|
1202
|
+
` "output": { label, outputMode: "return"|"notification"|"webhook"|"log", template?: string, webhookUrl?: string, checkpointId?: string }\n\n` +
|
|
1185
1203
|
` SPECIAL:\n` +
|
|
1186
|
-
`
|
|
1187
|
-
`
|
|
1188
|
-
`
|
|
1189
|
-
`
|
|
1190
|
-
`
|
|
1204
|
+
` "checkpoint": { label } — Loop/cycle target marker\n` +
|
|
1205
|
+
` "note": { label, noteText: string } — Annotation, does not affect execution\n\n` +
|
|
1206
|
+
`VARIABLE SYNTAX: Use {{prev.nodeId.outputText}} to reference prior node outputs. Use {{variables.key}} for stored variables.\n\n` +
|
|
1207
|
+
`STRUCTURE:\n` +
|
|
1208
|
+
` Each node: { type, data: { label, ... }, position?: { x, y } }. IDs/positions auto-generated if omitted.\n` +
|
|
1209
|
+
` Each edge: { source, target, sourceHandle?, targetHandle? }. IDs auto-generated.\n` +
|
|
1210
|
+
` A manual_trigger node is auto-prepended if no trigger node is included on create.\n\n` +
|
|
1211
|
+
`TIPS:\n` +
|
|
1212
|
+
` - Use "insert_node_between" to add a step in the middle of an existing flow (e.g. add a review gate between formatter and output).\n` +
|
|
1213
|
+
` - Use "update_node" to fill in or change a specific node's fields without rebuilding the whole graph.\n` +
|
|
1214
|
+
` - Use "get" first to see the current graph before making surgical edits.\n` +
|
|
1215
|
+
` - For condition/loop nodes, specify sourceHandle on edges to control which branch path is taken.`,
|
|
1191
1216
|
parameters: {
|
|
1192
1217
|
type: "object",
|
|
1193
1218
|
required: ["action"],
|
|
1194
1219
|
properties: {
|
|
1195
|
-
action: { type: "string", enum: ["list", "get", "create", "update", "delete", "list_runs", "trigger"] },
|
|
1220
|
+
action: { type: "string", enum: ["list", "get", "create", "update", "delete", "list_runs", "trigger", "add_nodes", "update_node", "delete_nodes", "add_edges", "delete_edges", "insert_node_between"] },
|
|
1196
1221
|
id: { type: "string", description: "Workflow ID" },
|
|
1197
|
-
workflow_id: { type: "string", description: "Workflow ID
|
|
1222
|
+
workflow_id: { type: "string", description: "Workflow ID" },
|
|
1198
1223
|
name: { type: "string", description: "Workflow name" },
|
|
1199
1224
|
description: { type: "string" },
|
|
1200
1225
|
nodes: {
|
|
@@ -1203,9 +1228,9 @@ function registerWorkflowOps(
|
|
|
1203
1228
|
type: "object",
|
|
1204
1229
|
properties: {
|
|
1205
1230
|
id: { type: "string", description: "Node ID (auto-generated if omitted)" },
|
|
1206
|
-
type: { type: "string", enum: ["manual_trigger", "webhook_trigger", "
|
|
1231
|
+
type: { type: "string", enum: ["manual_trigger", "webhook_trigger", "agent_step", "formatter_step", "http_request", "task_call", "variable_set", "condition", "human_approval", "delay", "loop", "convergence", "output", "checkpoint", "note"] },
|
|
1207
1232
|
position: { type: "object", properties: { x: { type: "number" }, y: { type: "number" } } },
|
|
1208
|
-
data: { type: "object", description: "Node config — always include a 'label' field. See NODE TYPES above for
|
|
1233
|
+
data: { type: "object", additionalProperties: true, description: "Node config — always include a 'label' field. See NODE TYPES above for all configurable fields per type." },
|
|
1209
1234
|
},
|
|
1210
1235
|
},
|
|
1211
1236
|
description: "Workflow graph nodes",
|
|
@@ -1218,10 +1243,28 @@ function registerWorkflowOps(
|
|
|
1218
1243
|
id: { type: "string", description: "Edge ID (auto-generated if omitted)" },
|
|
1219
1244
|
source: { type: "string", description: "Source node ID" },
|
|
1220
1245
|
target: { type: "string", description: "Target node ID" },
|
|
1246
|
+
sourceHandle: { type: "string", description: "Source handle: 'condition-true'/'condition-false' for conditions, 'loop_body'/'done' for loops" },
|
|
1247
|
+
targetHandle: { type: "string", description: "Target handle (rarely needed)" },
|
|
1221
1248
|
},
|
|
1222
1249
|
},
|
|
1223
|
-
description: "Connections between nodes
|
|
1250
|
+
description: "Connections between nodes",
|
|
1251
|
+
},
|
|
1252
|
+
// Granular edit params
|
|
1253
|
+
node_id: { type: "string", description: "Specific node ID for update_node" },
|
|
1254
|
+
node_ids: { type: "array", items: { type: "string" }, description: "Node IDs for delete_nodes" },
|
|
1255
|
+
edge_ids: { type: "array", items: { type: "string" }, description: "Edge IDs for delete_edges" },
|
|
1256
|
+
node: {
|
|
1257
|
+
type: "object",
|
|
1258
|
+
description: "Single node definition for insert_node_between. { type, data: { label, ... } }",
|
|
1259
|
+
properties: {
|
|
1260
|
+
type: { type: "string" },
|
|
1261
|
+
data: { type: "object", additionalProperties: true },
|
|
1262
|
+
position: { type: "object", properties: { x: { type: "number" }, y: { type: "number" } } },
|
|
1263
|
+
},
|
|
1224
1264
|
},
|
|
1265
|
+
data: { type: "object", additionalProperties: true, description: "Data fields to merge into a node (for update_node). Only specified fields are changed." },
|
|
1266
|
+
source_node_id: { type: "string", description: "Source node ID for insert_node_between" },
|
|
1267
|
+
target_node_id: { type: "string", description: "Target node ID for insert_node_between" },
|
|
1225
1268
|
steps: { type: "array", items: { type: "object" }, description: "Legacy V1 step definitions" },
|
|
1226
1269
|
schedule: { type: "string", description: "Cron expression or schedule" },
|
|
1227
1270
|
status: { type: "string", enum: ["draft", "active", "paused", "archived"] },
|
|
@@ -1232,21 +1275,22 @@ function registerWorkflowOps(
|
|
|
1232
1275
|
const action = params.action as string;
|
|
1233
1276
|
|
|
1234
1277
|
// Default data for each node type — ensures dashboard renders them properly
|
|
1278
|
+
// schedule_trigger kept for backward compatibility with existing workflows
|
|
1235
1279
|
const NODE_DEFAULTS: Record<string, Record<string, any>> = {
|
|
1236
1280
|
manual_trigger: { label: "Execute Trigger" },
|
|
1237
1281
|
webhook_trigger: { label: "Webhook Trigger" },
|
|
1238
1282
|
schedule_trigger: { label: "Schedule Trigger", cron: "0 9 * * 1-5" },
|
|
1239
|
-
agent_step: { label: "Agent Step", agentId: "", task: "", responseMode: "text", timeoutSec: 120 },
|
|
1240
|
-
formatter_step: { label: "Formatter", template: "" },
|
|
1241
|
-
http_request: { label: "HTTP Request", method: "GET", url: "" },
|
|
1242
|
-
task_call: { label: "Task", agentId: "", taskId: "", taskTitle: "", agentName: "" },
|
|
1243
|
-
variable_set: { label: "Set Variable", variableName: "", variableValue: "" },
|
|
1244
|
-
condition: { label: "Condition", expression: "" },
|
|
1245
|
-
human_approval: { label: "Human Approval", instructions: "" },
|
|
1283
|
+
agent_step: { label: "Agent Step", agentId: "", agentName: "", task: "", responseMode: "text", timeoutSec: 120 },
|
|
1284
|
+
formatter_step: { label: "Formatter", template: "", formatMode: "template", outputKey: "" },
|
|
1285
|
+
http_request: { label: "HTTP Request", method: "GET", url: "", body: "", headers: {}, timeoutSec: 30 },
|
|
1286
|
+
task_call: { label: "Task", agentId: "", taskId: "", taskTitle: "", agentName: "", systemPromptOverride: "" },
|
|
1287
|
+
variable_set: { label: "Set Variable", variableName: "", variableValue: "", operation: "set" },
|
|
1288
|
+
condition: { label: "Condition", expression: "", varCheck: "", operator: "==", varMatch: "" },
|
|
1289
|
+
human_approval: { label: "Human Approval", instructions: "", checkpointId: "" },
|
|
1246
1290
|
delay: { label: "Delay", delaySec: 5 },
|
|
1247
|
-
loop: { label: "Loop", loopType: "count", maxIterations: 3 },
|
|
1291
|
+
loop: { label: "Loop", loopType: "count", maxIterations: 3, iterateOver: "", checkpointId: "" },
|
|
1248
1292
|
convergence: { label: "Convergence", mergeStrategy: "wait_all" },
|
|
1249
|
-
output: { label: "Output", outputMode: "return" },
|
|
1293
|
+
output: { label: "Output", outputMode: "return", template: "", webhookUrl: "", checkpointId: "" },
|
|
1250
1294
|
checkpoint: { label: "Checkpoint" },
|
|
1251
1295
|
note: { label: "Note", noteText: "" },
|
|
1252
1296
|
};
|
|
@@ -1267,6 +1311,42 @@ function registerWorkflowOps(
|
|
|
1267
1311
|
};
|
|
1268
1312
|
}
|
|
1269
1313
|
|
|
1314
|
+
// Helper: normalize a single edge with auto-ID
|
|
1315
|
+
function normalizeEdge(e: any, i: number) {
|
|
1316
|
+
return {
|
|
1317
|
+
id: e.id || `edge-${Date.now()}-${i}`,
|
|
1318
|
+
source: e.source,
|
|
1319
|
+
target: e.target,
|
|
1320
|
+
...(e.sourceHandle ? { sourceHandle: e.sourceHandle } : {}),
|
|
1321
|
+
...(e.targetHandle ? { targetHandle: e.targetHandle } : {}),
|
|
1322
|
+
};
|
|
1323
|
+
}
|
|
1324
|
+
|
|
1325
|
+
// Helper: fetch current workflow graph
|
|
1326
|
+
async function fetchWorkflow(wfId: string) {
|
|
1327
|
+
const { data, error } = await supabase
|
|
1328
|
+
.from("workflows")
|
|
1329
|
+
.select("*")
|
|
1330
|
+
.eq("id", wfId)
|
|
1331
|
+
.eq("user_id", userId)
|
|
1332
|
+
.single();
|
|
1333
|
+
if (error) return { wf: null, error: error.message };
|
|
1334
|
+
return { wf: data, error: null };
|
|
1335
|
+
}
|
|
1336
|
+
|
|
1337
|
+
// Helper: save updated graph back to DB
|
|
1338
|
+
async function saveGraph(wfId: string, nodes: any[], edges: any[]) {
|
|
1339
|
+
const { data, error } = await supabase
|
|
1340
|
+
.from("workflows")
|
|
1341
|
+
.update({ nodes, edges, updated_at: new Date().toISOString() })
|
|
1342
|
+
.eq("id", wfId)
|
|
1343
|
+
.eq("user_id", userId)
|
|
1344
|
+
.select()
|
|
1345
|
+
.single();
|
|
1346
|
+
if (error) return { wf: null, error: error.message };
|
|
1347
|
+
return { wf: data, error: null };
|
|
1348
|
+
}
|
|
1349
|
+
|
|
1270
1350
|
switch (action) {
|
|
1271
1351
|
case "list": {
|
|
1272
1352
|
let q = supabase.from("workflows").select("*").eq("user_id", userId).order("updated_at", { ascending: false });
|
|
@@ -1302,7 +1382,6 @@ function registerWorkflowOps(
|
|
|
1302
1382
|
position: { x: 100, y: 200 },
|
|
1303
1383
|
data: { label: "Execute Trigger" },
|
|
1304
1384
|
};
|
|
1305
|
-
// Shift all other nodes to the right
|
|
1306
1385
|
finalNodes = finalNodes.map(n => ({
|
|
1307
1386
|
...n,
|
|
1308
1387
|
position: { x: (n.position?.x || 250) + 200, y: n.position?.y || 200 },
|
|
@@ -1310,23 +1389,14 @@ function registerWorkflowOps(
|
|
|
1310
1389
|
finalNodes.unshift(triggerNode);
|
|
1311
1390
|
}
|
|
1312
1391
|
|
|
1313
|
-
// Build edges
|
|
1314
|
-
let finalEdges = (params.edges as any[]) || [];
|
|
1315
|
-
finalEdges = finalEdges.map((e: any, i: number) => ({
|
|
1316
|
-
id: e.id || `edge-${Date.now()}-${i}`,
|
|
1317
|
-
source: e.source,
|
|
1318
|
-
target: e.target,
|
|
1319
|
-
...(e.sourceHandle ? { sourceHandle: e.sourceHandle } : {}),
|
|
1320
|
-
...(e.targetHandle ? { targetHandle: e.targetHandle } : {}),
|
|
1321
|
-
}));
|
|
1392
|
+
// Build edges
|
|
1393
|
+
let finalEdges = ((params.edges as any[]) || []).map((e: any, i: number) => normalizeEdge(e, i));
|
|
1322
1394
|
|
|
1323
1395
|
// Auto-wire trigger to first non-trigger node if no edge connects from trigger
|
|
1324
|
-
if (hasTrigger
|
|
1325
|
-
// No edges at all — auto-connect trigger → first step
|
|
1326
|
-
} else if (hasTrigger === false && finalNodes.length > 1) {
|
|
1396
|
+
if (!hasTrigger && finalNodes.length > 1) {
|
|
1327
1397
|
const triggerId = finalNodes[0].id;
|
|
1328
1398
|
const firstStepId = finalNodes[1].id;
|
|
1329
|
-
const triggerHasEdge = finalEdges.some(e => e.source === triggerId);
|
|
1399
|
+
const triggerHasEdge = finalEdges.some((e: any) => e.source === triggerId);
|
|
1330
1400
|
if (!triggerHasEdge) {
|
|
1331
1401
|
finalEdges.unshift({
|
|
1332
1402
|
id: `edge-trigger-${Date.now()}`,
|
|
@@ -1360,16 +1430,11 @@ function registerWorkflowOps(
|
|
|
1360
1430
|
for (const f of ["name", "description", "status", "steps", "schedule", "nodes", "edges"]) {
|
|
1361
1431
|
if ((params as any)[f] !== undefined) upd[f] = (params as any)[f];
|
|
1362
1432
|
}
|
|
1363
|
-
// Normalize nodes using the same defaults as create
|
|
1364
1433
|
if (upd.nodes && Array.isArray(upd.nodes)) {
|
|
1365
1434
|
upd.nodes = upd.nodes.map((n: any, i: number) => normalizeNode(n, i));
|
|
1366
1435
|
}
|
|
1367
1436
|
if (upd.edges && Array.isArray(upd.edges)) {
|
|
1368
|
-
upd.edges = upd.edges.map((e: any, i: number) => (
|
|
1369
|
-
id: e.id || `edge-${Date.now()}-${i}`,
|
|
1370
|
-
source: e.source,
|
|
1371
|
-
target: e.target,
|
|
1372
|
-
}));
|
|
1437
|
+
upd.edges = upd.edges.map((e: any, i: number) => normalizeEdge(e, i));
|
|
1373
1438
|
}
|
|
1374
1439
|
const { data, error } = await supabase.from("workflows").update(upd).eq("id", wfId).eq("user_id", userId).select().single();
|
|
1375
1440
|
if (error) return err(error.message);
|
|
@@ -1378,7 +1443,6 @@ function registerWorkflowOps(
|
|
|
1378
1443
|
case "delete": {
|
|
1379
1444
|
const wfId = (params.id || params.workflow_id) as string;
|
|
1380
1445
|
if (!wfId) return err("Missing required: id");
|
|
1381
|
-
// Delete associated runs first
|
|
1382
1446
|
await supabase.from("workflow_runs").delete().eq("workflow_id", wfId);
|
|
1383
1447
|
const { error } = await supabase.from("workflows").delete().eq("id", wfId).eq("user_id", userId);
|
|
1384
1448
|
if (error) return err(error.message);
|
|
@@ -1401,6 +1465,7 @@ function registerWorkflowOps(
|
|
|
1401
1465
|
const { error } = await supabase.from("workflow_runs").insert({
|
|
1402
1466
|
id: runId,
|
|
1403
1467
|
workflow_id: wfId,
|
|
1468
|
+
user_id: userId,
|
|
1404
1469
|
status: "running",
|
|
1405
1470
|
started_at: new Date().toISOString(),
|
|
1406
1471
|
trigger_type: "agent",
|
|
@@ -1408,8 +1473,276 @@ function registerWorkflowOps(
|
|
|
1408
1473
|
if (error) return err(error.message);
|
|
1409
1474
|
return ok({ message: `Workflow run triggered`, run_id: runId, workflow_id: wfId });
|
|
1410
1475
|
}
|
|
1476
|
+
|
|
1477
|
+
// ─── Granular Node Operations ────────────────────────────────────
|
|
1478
|
+
|
|
1479
|
+
case "add_nodes": {
|
|
1480
|
+
const wfId = (params.workflow_id || params.id) as string;
|
|
1481
|
+
if (!wfId) return err("Missing required: workflow_id");
|
|
1482
|
+
const newNodes = params.nodes as any[];
|
|
1483
|
+
if (!newNodes || !Array.isArray(newNodes) || newNodes.length === 0) return err("Missing required: nodes[] (array of node definitions)");
|
|
1484
|
+
|
|
1485
|
+
return sequentialWorkflowOp(wfId, async () => {
|
|
1486
|
+
const { wf, error: fetchErr } = await fetchWorkflow(wfId);
|
|
1487
|
+
if (fetchErr || !wf) return err(fetchErr || "Workflow not found");
|
|
1488
|
+
|
|
1489
|
+
const existingNodes = (wf.nodes as any[]) || [];
|
|
1490
|
+
const existingEdges = (wf.edges as any[]) || [];
|
|
1491
|
+
|
|
1492
|
+
const maxY = existingNodes.reduce((max: number, n: any) => Math.max(max, n.position?.y || 0), 0);
|
|
1493
|
+
const normalized = newNodes.map((n, i) => {
|
|
1494
|
+
const node = normalizeNode(n, existingNodes.length + i);
|
|
1495
|
+
if (!n.position) {
|
|
1496
|
+
node.position = { x: 250, y: maxY + 120 + i * 150 };
|
|
1497
|
+
}
|
|
1498
|
+
return node;
|
|
1499
|
+
});
|
|
1500
|
+
|
|
1501
|
+
const allNodes = [...existingNodes, ...normalized];
|
|
1502
|
+
|
|
1503
|
+
let allEdges = existingEdges;
|
|
1504
|
+
if (params.edges && Array.isArray(params.edges)) {
|
|
1505
|
+
const newEdges = (params.edges as any[]).map((e: any, i: number) => normalizeEdge(e, existingEdges.length + i));
|
|
1506
|
+
allEdges = [...existingEdges, ...newEdges];
|
|
1507
|
+
}
|
|
1508
|
+
|
|
1509
|
+
const { wf: saved, error: saveErr } = await saveGraph(wfId, allNodes, allEdges);
|
|
1510
|
+
if (saveErr) return err(saveErr);
|
|
1511
|
+
|
|
1512
|
+
return ok({
|
|
1513
|
+
message: `Added ${normalized.length} node(s) to workflow`,
|
|
1514
|
+
added_nodes: normalized.map((n: any) => ({ id: n.id, type: n.type, label: n.data?.label })),
|
|
1515
|
+
total_nodes: allNodes.length,
|
|
1516
|
+
total_edges: allEdges.length,
|
|
1517
|
+
workflow: saved,
|
|
1518
|
+
});
|
|
1519
|
+
});
|
|
1520
|
+
}
|
|
1521
|
+
|
|
1522
|
+
case "update_node": {
|
|
1523
|
+
const wfId = (params.workflow_id || params.id) as string;
|
|
1524
|
+
if (!wfId) return err("Missing required: workflow_id");
|
|
1525
|
+
const nodeId = params.node_id as string;
|
|
1526
|
+
if (!nodeId) return err("Missing required: node_id");
|
|
1527
|
+
const dataUpdate = params.data as Record<string, any>;
|
|
1528
|
+
if (!dataUpdate || typeof dataUpdate !== "object") return err("Missing required: data (object with fields to update)");
|
|
1529
|
+
|
|
1530
|
+
return sequentialWorkflowOp(wfId, async () => {
|
|
1531
|
+
const { wf, error: fetchErr } = await fetchWorkflow(wfId);
|
|
1532
|
+
if (fetchErr || !wf) return err(fetchErr || "Workflow not found");
|
|
1533
|
+
|
|
1534
|
+
const nodes = (wf.nodes as any[]) || [];
|
|
1535
|
+
const nodeIndex = nodes.findIndex((n: any) => n.id === nodeId);
|
|
1536
|
+
if (nodeIndex === -1) return err(`Node "${nodeId}" not found in workflow. Use action "get" to see all node IDs.`);
|
|
1537
|
+
|
|
1538
|
+
// Merge new data into existing node data (preserves untouched fields)
|
|
1539
|
+
const existingData = nodes[nodeIndex].data || {};
|
|
1540
|
+
nodes[nodeIndex].data = { ...existingData, ...dataUpdate };
|
|
1541
|
+
|
|
1542
|
+
if (params.node && typeof params.node === "object" && (params.node as any).position) {
|
|
1543
|
+
nodes[nodeIndex].position = (params.node as any).position;
|
|
1544
|
+
}
|
|
1545
|
+
|
|
1546
|
+
const { wf: saved, error: saveErr } = await saveGraph(wfId, nodes, wf.edges || []);
|
|
1547
|
+
if (saveErr) return err(saveErr);
|
|
1548
|
+
|
|
1549
|
+
return ok({
|
|
1550
|
+
message: `Node "${nodeId}" updated`,
|
|
1551
|
+
node: { id: nodes[nodeIndex].id, type: nodes[nodeIndex].type, data: nodes[nodeIndex].data },
|
|
1552
|
+
fields_updated: Object.keys(dataUpdate),
|
|
1553
|
+
workflow: saved,
|
|
1554
|
+
});
|
|
1555
|
+
});
|
|
1556
|
+
}
|
|
1557
|
+
|
|
1558
|
+
case "delete_nodes": {
|
|
1559
|
+
const wfId = (params.workflow_id || params.id) as string;
|
|
1560
|
+
if (!wfId) return err("Missing required: workflow_id");
|
|
1561
|
+
const nodeIds = params.node_ids as string[];
|
|
1562
|
+
if (!nodeIds || !Array.isArray(nodeIds) || nodeIds.length === 0) return err("Missing required: node_ids[] (array of node IDs to delete)");
|
|
1563
|
+
|
|
1564
|
+
return sequentialWorkflowOp(wfId, async () => {
|
|
1565
|
+
const { wf, error: fetchErr } = await fetchWorkflow(wfId);
|
|
1566
|
+
if (fetchErr || !wf) return err(fetchErr || "Workflow not found");
|
|
1567
|
+
|
|
1568
|
+
// Track which IDs actually exist vs. not found
|
|
1569
|
+
const existingNodeIdSet = new Set(((wf.nodes as any[]) || []).map((n: any) => n.id));
|
|
1570
|
+
const actuallyDeletedIds = nodeIds.filter(id => existingNodeIdSet.has(id));
|
|
1571
|
+
const notFoundIds = nodeIds.filter(id => !existingNodeIdSet.has(id));
|
|
1572
|
+
|
|
1573
|
+
const nodeIdSet = new Set(nodeIds);
|
|
1574
|
+
const nodes = ((wf.nodes as any[]) || []).filter((n: any) => !nodeIdSet.has(n.id));
|
|
1575
|
+
// Also remove edges connected to deleted nodes
|
|
1576
|
+
const edges = ((wf.edges as any[]) || []).filter((e: any) => !nodeIdSet.has(e.source) && !nodeIdSet.has(e.target));
|
|
1577
|
+
|
|
1578
|
+
const removedEdgeCount = ((wf.edges as any[]) || []).length - edges.length;
|
|
1579
|
+
|
|
1580
|
+
const { wf: saved, error: saveErr } = await saveGraph(wfId, nodes, edges);
|
|
1581
|
+
if (saveErr) return err(saveErr);
|
|
1582
|
+
|
|
1583
|
+
return ok({
|
|
1584
|
+
message: `Deleted ${actuallyDeletedIds.length} node(s) and ${removedEdgeCount} connected edge(s)`,
|
|
1585
|
+
deleted_node_ids: actuallyDeletedIds,
|
|
1586
|
+
not_found_ids: notFoundIds.length > 0 ? notFoundIds : undefined,
|
|
1587
|
+
remaining_nodes: nodes.length,
|
|
1588
|
+
remaining_edges: edges.length,
|
|
1589
|
+
workflow: saved,
|
|
1590
|
+
});
|
|
1591
|
+
});
|
|
1592
|
+
}
|
|
1593
|
+
|
|
1594
|
+
case "add_edges": {
  // Append new edges to an existing workflow graph. Every edge must reference
  // nodes already present, and branch-specific sourceHandles are checked
  // against the source node's type before anything is persisted.
  const workflowId = (params.workflow_id || params.id) as string;
  if (!workflowId) return err("Missing required: workflow_id");
  const incoming = params.edges as any[];
  if (!incoming || !Array.isArray(incoming) || incoming.length === 0) return err("Missing required: edges[] (array of edge definitions)");

  return sequentialWorkflowOp(workflowId, async () => {
    const { wf, error: fetchErr } = await fetchWorkflow(workflowId);
    if (fetchErr || !wf) return err(fetchErr || "Workflow not found");

    const currentEdges = (wf.edges as any[]) || [];
    const currentNodes = (wf.nodes as any[]) || [];
    const knownNodeIds = new Set(currentNodes.map((n: any) => n.id));
    const nodesById = new Map(currentNodes.map((n: any) => [n.id, n]));

    try {
      // Validate and normalize each edge; any failure aborts the whole batch
      // (nothing is saved on error).
      const normalized: any[] = [];
      for (let i = 0; i < incoming.length; i++) {
        const e = incoming[i];
        if (!knownNodeIds.has(e.source)) throw new Error(`Source node "${e.source}" not found in workflow`);
        if (!knownNodeIds.has(e.target)) throw new Error(`Target node "${e.target}" not found in workflow`);

        // condition-* handles are reserved for condition nodes; loop_body/done
        // are reserved for loop nodes.
        if (e.sourceHandle) {
          const srcNode = nodesById.get(e.source);
          if (e.sourceHandle.startsWith("condition-") && srcNode?.type !== "condition") {
            throw new Error(`sourceHandle "${e.sourceHandle}" is only valid on condition nodes, but source "${e.source}" is type "${srcNode?.type}"`);
          }
          if ((e.sourceHandle === "loop_body" || e.sourceHandle === "done") && srcNode?.type !== "loop") {
            throw new Error(`sourceHandle "${e.sourceHandle}" is only valid on loop nodes, but source "${e.source}" is type "${srcNode?.type}"`);
          }
        }

        normalized.push(normalizeEdge(e, currentEdges.length + i));
      }

      const mergedEdges = currentEdges.concat(normalized);
      const { wf: saved, error: saveErr } = await saveGraph(workflowId, wf.nodes || [], mergedEdges);
      if (saveErr) return err(saveErr);

      return ok({
        message: `Added ${normalized.length} edge(s)`,
        added_edges: normalized.map((e: any) => ({ id: e.id, source: e.source, target: e.target })),
        total_edges: mergedEdges.length,
        workflow: saved,
      });
    } catch (e: any) {
      return err(e.message);
    }
  });
}
|
|
1644
|
+
|
|
1645
|
+
case "delete_edges": {
  // Remove edges from a workflow by ID. Unknown IDs are tolerated and
  // reported back to the caller rather than treated as an error.
  const workflowId = (params.workflow_id || params.id) as string;
  if (!workflowId) return err("Missing required: workflow_id");
  const idsToDelete = params.edge_ids as string[];
  if (!idsToDelete || !Array.isArray(idsToDelete) || idsToDelete.length === 0) return err("Missing required: edge_ids[] (array of edge IDs to delete)");

  return sequentialWorkflowOp(workflowId, async () => {
    const { wf, error: fetchErr } = await fetchWorkflow(workflowId);
    if (fetchErr || !wf) return err(fetchErr || "Workflow not found");

    const currentEdges = (wf.edges as any[]) || [];
    const presentIds = new Set(currentEdges.map((e: any) => e.id));

    // Split the request (in request order) into IDs that exist vs. those that
    // don't, so the response shows exactly what happened.
    const deletedIds: string[] = [];
    const missingIds: string[] = [];
    for (const id of idsToDelete) (presentIds.has(id) ? deletedIds : missingIds).push(id);

    const removalSet = new Set(idsToDelete);
    const survivingEdges = currentEdges.filter((e: any) => !removalSet.has(e.id));

    const { wf: saved, error: saveErr } = await saveGraph(workflowId, wf.nodes || [], survivingEdges);
    if (saveErr) return err(saveErr);

    return ok({
      message: `Deleted ${deletedIds.length} edge(s)`,
      deleted_edge_ids: deletedIds,
      not_found_ids: missingIds.length > 0 ? missingIds : undefined,
      remaining_edges: survivingEdges.length,
      workflow: saved,
    });
  });
}
|
|
1675
|
+
|
|
1676
|
+
case "insert_node_between": {
  // Splice a new node into an existing edge: source -> new -> target.
  // The original connecting edge is removed and replaced by two edges; the
  // original sourceHandle is preserved on the inbound edge and the original
  // targetHandle on the outbound edge, so branch semantics survive the splice.
  const wfId = (params.workflow_id || params.id) as string;
  if (!wfId) return err("Missing required: workflow_id");
  const sourceId = params.source_node_id as string;
  const targetId = params.target_node_id as string;
  const newNodeDef = params.node as any;
  if (!sourceId) return err("Missing required: source_node_id");
  if (!targetId) return err("Missing required: target_node_id");
  if (!newNodeDef || typeof newNodeDef !== "object") return err("Missing required: node (the node definition to insert)");

  return sequentialWorkflowOp(wfId, async () => {
    const { wf, error: fetchErr } = await fetchWorkflow(wfId);
    if (fetchErr || !wf) return err(fetchErr || "Workflow not found");

    const nodes = (wf.nodes as any[]) || [];
    const edges = (wf.edges as any[]) || [];

    const sourceNode = nodes.find((n: any) => n.id === sourceId);
    const targetNode = nodes.find((n: any) => n.id === targetId);
    if (!sourceNode) return err(`Source node "${sourceId}" not found. Use action "get" to see node IDs.`);
    if (!targetNode) return err(`Target node "${targetId}" not found. Use action "get" to see node IDs.`);

    // If several parallel edges connect the pair, only the first is spliced.
    const connectingEdge = edges.find((e: any) => e.source === sourceId && e.target === targetId);
    if (!connectingEdge) return err(`No edge found from "${sourceId}" to "${targetId}". They may not be directly connected.`);

    // Place the new node at the midpoint of its neighbors unless the caller
    // supplied an explicit position.
    const midX = ((sourceNode.position?.x || 0) + (targetNode.position?.x || 0)) / 2;
    const midY = ((sourceNode.position?.y || 0) + (targetNode.position?.y || 0)) / 2;
    const newNode = normalizeNode(
      { ...newNodeDef, position: newNodeDef.position || { x: midX, y: midY } },
      nodes.length,
    );

    const updatedEdges = edges.filter((e: any) => e.id !== connectingEdge.id);

    // FIX: a bare `edge-${Date.now()}-in`/`-out` ID can collide with an edge
    // created by an earlier insert in the same millisecond on this workflow
    // (Date.now() has only millisecond resolution). Guarantee uniqueness
    // against the current edge set by appending a counter on collision.
    const usedEdgeIds = new Set(updatedEdges.map((e: any) => e.id));
    const uniqueEdgeId = (suffix: string): string => {
      const base = `edge-${Date.now()}`;
      let candidate = `${base}-${suffix}`;
      for (let n = 2; usedEdgeIds.has(candidate); n++) candidate = `${base}-${n}-${suffix}`;
      usedEdgeIds.add(candidate);
      return candidate;
    };

    const edgeIn = {
      id: uniqueEdgeId("in"),
      source: sourceId,
      target: newNode.id,
      ...(connectingEdge.sourceHandle ? { sourceHandle: connectingEdge.sourceHandle } : {}),
    };
    const edgeOut = {
      id: uniqueEdgeId("out"),
      source: newNode.id,
      target: targetId,
      ...(connectingEdge.targetHandle ? { targetHandle: connectingEdge.targetHandle } : {}),
    };

    updatedEdges.push(edgeIn, edgeOut);
    nodes.push(newNode);

    const { wf: saved, error: saveErr } = await saveGraph(wfId, nodes, updatedEdges);
    if (saveErr) return err(saveErr);

    return ok({
      message: `Inserted "${newNode.data.label}" (${newNode.type}) between "${sourceNode.data?.label || sourceId}" and "${targetNode.data?.label || targetId}"`,
      inserted_node: { id: newNode.id, type: newNode.type, label: newNode.data.label, position: newNode.position },
      new_edges: [
        { id: edgeIn.id, from: sourceId, to: newNode.id },
        { id: edgeOut.id, from: newNode.id, to: targetId },
      ],
      removed_edge: connectingEdge.id,
      total_nodes: nodes.length,
      total_edges: updatedEdges.length,
      workflow: saved,
    });
  });
}
|
|
1743
|
+
|
|
1411
1744
|
default:
|
|
1412
|
-
return err(`Unknown action "${action}"
|
|
1745
|
+
return err(`Unknown action "${action}". Valid: list, get, create, update, delete, list_runs, trigger, add_nodes, update_node, delete_nodes, add_edges, delete_edges, insert_node_between`);
|
|
1413
1746
|
}
|
|
1414
1747
|
},
|
|
1415
1748
|
});
|