@canaryai/cli 0.2.9 → 0.2.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{chunk-C2PGZRYK.js → chunk-CEW4BDXD.js} +26 -7
- package/dist/chunk-CEW4BDXD.js.map +1 -0
- package/dist/chunk-ERSNYLMZ.js +229 -0
- package/dist/chunk-ERSNYLMZ.js.map +1 -0
- package/dist/{chunk-XGO62PO2.js → chunk-MSMC6UXW.js} +198 -11
- package/dist/{chunk-XGO62PO2.js.map → chunk-MSMC6UXW.js.map} +1 -1
- package/dist/{chunk-LC7ZVXPH.js → chunk-Q7WFBG5C.js} +2 -2
- package/dist/{debug-workflow-I3F36JBL.js → debug-workflow-53ULOFJC.js} +4 -4
- package/dist/{docs-REHST3YB.js → docs-BEE3LOCO.js} +2 -2
- package/dist/{feature-flag-3HB5NTMY.js → feature-flag-CYTDV4ZB.js} +2 -2
- package/dist/index.js +61 -139
- package/dist/index.js.map +1 -1
- package/dist/init-M6I3MG3D.js +146 -0
- package/dist/init-M6I3MG3D.js.map +1 -0
- package/dist/{issues-YU57CHXS.js → issues-NLM72HLU.js} +2 -2
- package/dist/{knobs-QJ4IBLCT.js → knobs-O35GAU5M.js} +2 -2
- package/dist/list-4K4EIGAT.js +57 -0
- package/dist/list-4K4EIGAT.js.map +1 -0
- package/dist/local-NHXXPHZ3.js +63 -0
- package/dist/local-NHXXPHZ3.js.map +1 -0
- package/dist/{local-browser-MKTJ36KY.js → local-browser-VAZORCO3.js} +3 -3
- package/dist/login-ZLP64YQP.js +130 -0
- package/dist/login-ZLP64YQP.js.map +1 -0
- package/dist/{mcp-ZOKM2AUE.js → mcp-ZF5G5DCB.js} +4 -126
- package/dist/mcp-ZF5G5DCB.js.map +1 -0
- package/dist/{record-TNDBT3NY.js → record-V6QKFFH3.js} +6 -47
- package/dist/record-V6QKFFH3.js.map +1 -0
- package/dist/{release-L4IXOHDF.js → release-7TI7EIGD.js} +8 -4
- package/dist/release-7TI7EIGD.js.map +1 -0
- package/dist/{session-RNLKFS2Z.js → session-UGNJXRUW.js} +138 -70
- package/dist/session-UGNJXRUW.js.map +1 -0
- package/dist/skill-ORWAPBDW.js +424 -0
- package/dist/skill-ORWAPBDW.js.map +1 -0
- package/dist/{src-2WSMYBMJ.js → src-4VIDSK4A.js} +2 -2
- package/dist/start-E532F3BU.js +112 -0
- package/dist/start-E532F3BU.js.map +1 -0
- package/dist/workflow-HXIUXRFI.js +613 -0
- package/dist/workflow-HXIUXRFI.js.map +1 -0
- package/package.json +1 -1
- package/dist/chunk-C2PGZRYK.js.map +0 -1
- package/dist/chunk-DXIAHB72.js +0 -340
- package/dist/chunk-DXIAHB72.js.map +0 -1
- package/dist/chunk-QLFSJG5O.js +0 -93
- package/dist/chunk-QLFSJG5O.js.map +0 -1
- package/dist/mcp-ZOKM2AUE.js.map +0 -1
- package/dist/record-TNDBT3NY.js.map +0 -1
- package/dist/release-L4IXOHDF.js.map +0 -1
- package/dist/session-RNLKFS2Z.js.map +0 -1
- package/dist/skill-CZ7SHI3P.js +0 -156
- package/dist/skill-CZ7SHI3P.js.map +0 -1
- /package/dist/{chunk-LC7ZVXPH.js.map → chunk-Q7WFBG5C.js.map} +0 -0
- /package/dist/{debug-workflow-I3F36JBL.js.map → debug-workflow-53ULOFJC.js.map} +0 -0
- /package/dist/{docs-REHST3YB.js.map → docs-BEE3LOCO.js.map} +0 -0
- /package/dist/{feature-flag-3HB5NTMY.js.map → feature-flag-CYTDV4ZB.js.map} +0 -0
- /package/dist/{issues-YU57CHXS.js.map → issues-NLM72HLU.js.map} +0 -0
- /package/dist/{knobs-QJ4IBLCT.js.map → knobs-O35GAU5M.js.map} +0 -0
- /package/dist/{local-browser-MKTJ36KY.js.map → local-browser-VAZORCO3.js.map} +0 -0
- /package/dist/{src-2WSMYBMJ.js.map → src-4VIDSK4A.js.map} +0 -0
|
@@ -0,0 +1,613 @@
|
|
|
1
|
+
import { createRequire as __cr } from "module"; const require = __cr(import.meta.url);
|
|
2
|
+
import {
|
|
3
|
+
apiRequest
|
|
4
|
+
} from "./chunk-ERSNYLMZ.js";
|
|
5
|
+
import {
|
|
6
|
+
getArgValue,
|
|
7
|
+
hasFlag,
|
|
8
|
+
resolveConfig
|
|
9
|
+
} from "./chunk-PWWQGYFG.js";
|
|
10
|
+
import "./chunk-XAA5VQ5N.js";
|
|
11
|
+
import "./chunk-VKVL7WBN.js";
|
|
12
|
+
|
|
13
|
+
// src/workflow.ts
|
|
14
|
+
import fs2 from "fs/promises";
|
|
15
|
+
import process2 from "process";
|
|
16
|
+
|
|
17
|
+
// src/workflow-create.ts
|
|
18
|
+
import fs from "fs/promises";
|
|
19
|
+
import process from "process";
|
|
20
|
+
import { createParser } from "eventsource-parser";
|
|
21
|
+
// Collect everything piped to stdin and return it as one UTF-8 string.
// Throws immediately when stdin is attached to a terminal, since that
// means nothing was piped in and the promise would never settle.
async function readFromStdin() {
  const { stdin } = process;
  if (stdin.isTTY) {
    throw new Error("No input on stdin. Pipe workflow JSON or use --from-file.");
  }
  const received = [];
  return new Promise((resolve, reject) => {
    stdin.on("data", (piece) => {
      received.push(piece);
    });
    stdin.on("end", () => {
      resolve(Buffer.concat(received).toString("utf-8"));
    });
    stdin.on("error", reject);
  });
}
|
|
33
|
+
// Load workflow JSON from --from-file (if given) or stdin, parse it, and
// validate the minimal shape: a "name" string plus a non-empty "nodes"
// array whose entries all carry a recognized nodeType.
// Returns the parsed object; throws Error with a user-facing message on
// any read, parse, or validation failure.
async function readWorkflowInput(argv) {
  const fromFile = getArgValue(argv, "--from-file");
  let raw;
  if (fromFile) {
    try {
      raw = await fs.readFile(fromFile, "utf-8");
    } catch (err) {
      const reason = err instanceof Error ? err.message : String(err);
      throw new Error(`Failed to read file ${fromFile}: ${reason}`);
    }
  } else {
    raw = await readFromStdin();
  }
  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch {
    throw new Error("Invalid JSON input. Please provide valid workflow JSON.");
  }
  const input = parsed;
  if (typeof input.name !== "string" || !input.name) {
    throw new Error('Workflow JSON must include a "name" string field.');
  }
  if (!Array.isArray(input.nodes) || input.nodes.length === 0) {
    throw new Error('Workflow JSON must include a non-empty "nodes" array.');
  }
  const validNodeTypes = ["login", "navigate", "action", "assertion", "end", "setup", "seed", "wait"];
  for (const node of input.nodes) {
    const recognized = node.nodeType && validNodeTypes.includes(node.nodeType);
    if (!recognized) {
      throw new Error(
        `Invalid nodeType "${node.nodeType}". Valid types: ${validNodeTypes.join(", ")}`
      );
    }
  }
  return input;
}
|
|
68
|
+
// Best-effort lookup of the caller's active session so a new workflow can
// inherit its property/credential. Every failure mode — network error,
// non-2xx status, missing or empty session list — degrades silently to
// an empty object; callers treat absent keys as "nothing to inherit".
async function detectSessionContext(apiUrl, token) {
  try {
    const response = await fetch(`${apiUrl}/sessions/status`, {
      headers: { Authorization: `Bearer ${token}` }
    });
    if (!response.ok) {
      return {};
    }
    const payload = await response.json();
    const [active] = payload.sessions ?? [];
    if (!active) {
      return {};
    }
    return { propertyId: active.propertyId, credentialId: active.credentialId };
  } catch {
    return {};
  }
}
|
|
86
|
+
// Trigger a server-side test run for the freshly created workflow,
// stream its progress over SSE, and publish the workflow when the test
// passes. Returns true only when BOTH the test and the publish call
// succeed; every failure path prints a message and returns false.
// NOTE(review): the run is targeted by workflow *name pattern*, not by
// workflowId — if multiple workflows match, `success` reflects only the
// most recent per-test event; confirm the server scopes the pattern.
async function runAndPublish(opts) {
  const { apiUrl, token, workflowId, workflowName } = opts;
  // Optional run scoping; omitted keys let the server choose defaults.
  const body = {};
  if (opts.propertyId) body.propertyId = opts.propertyId;
  if (opts.environmentId) body.environmentId = opts.environmentId;
  const triggerUrl = `${apiUrl}/workflows/test-runs?namePattern=${encodeURIComponent(workflowName)}`;
  let triggerRes;
  try {
    triggerRes = await fetch(triggerUrl, {
      method: "POST",
      headers: {
        Authorization: `Bearer ${token}`,
        "Content-Type": "application/json"
      },
      body: JSON.stringify(body)
    });
  } catch (err) {
    // Network-level failure (DNS, refused connection, ...).
    console.error(`Failed to trigger test run: ${err}`);
    return false;
  }
  if (!triggerRes.ok) {
    const errorText = await triggerRes.text();
    console.error(`Failed to trigger test run: ${triggerRes.status} ${errorText}`);
    return false;
  }
  const triggerData = await triggerRes.json();
  if (!triggerData.ok || !triggerData.suiteId) {
    console.error(`Failed to trigger test run: ${triggerData.error ?? "Unknown error"}`);
    return false;
  }
  const { suiteId } = triggerData;
  console.log(`Test run started (suite: ${suiteId})`);
  console.log("Streaming results...\n");
  // Second request: long-lived SSE stream of per-test and per-suite events.
  const streamUrl = `${apiUrl}/workflows/test-runs/stream?suiteId=${suiteId}`;
  let streamRes;
  try {
    streamRes = await fetch(streamUrl, {
      headers: {
        Authorization: `Bearer ${token}`,
        Accept: "text/event-stream"
      }
    });
  } catch (err) {
    console.error(`Failed to connect to event stream: ${err}`);
    return false;
  }
  if (!streamRes.ok || !streamRes.body) {
    console.error(`Failed to connect to event stream: ${streamRes.status}`);
    return false;
  }
  // Mutable flags updated by the SSE parser callback below and read by
  // the read loop: `success` tracks the latest test outcome, while
  // `hasCompleted` ends the loop once the suite reports completion.
  let success = false;
  let hasCompleted = false;
  const parser = createParser({
    onEvent: (event) => {
      if (!event.data) return;
      try {
        const data = JSON.parse(event.data);
        if (event.event === "workflow-test") {
          const testEvent = data;
          if (testEvent.status === "success") {
            console.log(` \u2713 ${workflowName}`);
            success = true;
          } else if (testEvent.status === "failed") {
            console.log(` \u2717 ${workflowName}`);
            if (testEvent.errorMessage) {
              console.log(` Error: ${testEvent.errorMessage.slice(0, 200)}`);
            }
            // A later "failed" overrides an earlier "success".
            success = false;
          } else if (testEvent.status === "running") {
            console.log(` \u25B6 Running ${workflowName}...`);
          }
        }
        if (event.event === "workflow-test-suite") {
          const suiteEvent = data;
          if (suiteEvent.status === "completed") {
            hasCompleted = true;
          }
        }
      } catch {
        // Malformed SSE payloads are ignored by design (best-effort stream).
      }
    }
  });
  const reader = streamRes.body.getReader();
  const decoder = new TextDecoder();
  try {
    // Feed raw bytes to the SSE parser until the suite completes or the
    // server closes the stream. `hasCompleted` is flipped synchronously
    // inside parser.feed(), so the loop condition sees it next iteration.
    while (!hasCompleted) {
      const { done, value } = await reader.read();
      if (done) break;
      parser.feed(decoder.decode(value, { stream: true }));
    }
  } finally {
    reader.releaseLock();
  }
  if (success) {
    try {
      const publishRes = await fetch(`${apiUrl}/workflows/${workflowId}/publish`, {
        method: "POST",
        headers: {
          Authorization: `Bearer ${token}`,
          "Content-Type": "application/json"
        }
      });
      if (publishRes.ok) {
        console.log(`
\u2713 Workflow published successfully`);
        return true;
      } else {
        console.error(`
Failed to publish workflow: ${publishRes.status}`);
        return false;
      }
    } catch (err) {
      console.error(`
Failed to publish workflow: ${err}`);
      return false;
    }
  }
  // Test failed (or never reported success): workflow stays a draft.
  console.log(`
\u2717 Test failed \u2014 workflow left in draft for manual review`);
  return false;
}
|
|
207
|
+
// Handle `canary workflow create`: read workflow JSON (stdin or file),
// optionally inherit property/credential from the caller's active
// session, create the workflow via the API, then optionally run its
// test and publish on success (--run-and-publish).
// Exits the process with code 1 on any validation or API failure.
async function handleWorkflowCreate(argv, apiUrl, token) {
  const quarantine = hasFlag(argv, "--quarantine");
  const runAndPublishFlag = hasFlag(argv, "--run-and-publish");
  const propertyArg = getArgValue(argv, "--property");
  const environmentArg = getArgValue(argv, "--environment");
  // The two lifecycle modes conflict: quarantine holds the workflow
  // back while run-and-publish promotes it immediately on success.
  if (quarantine && runAndPublishFlag) {
    console.error("Error: --quarantine and --run-and-publish are mutually exclusive.");
    process.exit(1);
  }
  let input;
  try {
    input = await readWorkflowInput(argv);
  } catch (err) {
    console.error(`Error: ${err instanceof Error ? err.message : String(err)}`);
    process.exit(1);
  }
  // If the payload names neither a property nor a credential, inherit
  // both (best-effort) from an active session.
  if (!input.propertyId && !input.credentialId) {
    const sessionCtx = await detectSessionContext(apiUrl, token);
    if (sessionCtx.propertyId) {
      input.propertyId = sessionCtx.propertyId;
      console.log(`Auto-linked property from active session`);
    }
    if (sessionCtx.credentialId) {
      input.credentialId = sessionCtx.credentialId;
      console.log(`Auto-linked credential from active session`);
    }
  }
  if (quarantine) {
    input.status = "quarantined";
  }
  input.createdVia = "cli";
  // Fix: this fetch previously had no try/catch, so a network failure
  // surfaced as an unhandled rejection instead of a clean CLI error.
  let res;
  try {
    res = await fetch(`${apiUrl}/workflows/create-with-nodes`, {
      method: "POST",
      headers: {
        Authorization: `Bearer ${token}`,
        "Content-Type": "application/json"
      },
      body: JSON.stringify(input)
    });
  } catch (err) {
    console.error(`Error: ${err instanceof Error ? err.message : String(err)}`);
    process.exit(1);
  }
  if (res.status === 401) {
    console.error("Error: Unauthorized. Your session may have expired.");
    console.error("Run: canary login");
    process.exit(1);
  }
  const result = await res.json();
  if (!result.ok || !result.workflow) {
    console.error(`Error: ${result.error ?? "Failed to create workflow"}`);
    process.exit(1);
  }
  const { workflow, nodes, edges } = result;
  console.log(`
\u2713 Workflow created: ${workflow.name}`);
  console.log(` ID: ${workflow.id}`);
  console.log(` Status: ${workflow.status}`);
  console.log(` Nodes: ${nodes?.length ?? 0}`);
  console.log(` Edges: ${edges?.length ?? 0}`);
  // NOTE(review): assumes `api.`-style hosts map to `app.`; the `/api`
  // path branch inserts "app." with no separating slash — verify the
  // printed link against real deployments.
  const webUrl = apiUrl.replace(/\/api\b|api\./, "app.").replace(/:\d+$/, ":5173");
  console.log(` URL: ${webUrl}/workflows/${workflow.id}`);
  if (runAndPublishFlag) {
    console.log("\nTriggering test run...");
    const passed = await runAndPublish({
      apiUrl,
      token,
      workflowId: workflow.id,
      workflowName: workflow.name,
      // CLI flag wins over the propertyId embedded in the payload.
      propertyId: propertyArg ?? input.propertyId,
      environmentId: environmentArg
    });
    if (!passed) {
      process.exit(1);
    }
  }
}
|
|
280
|
+
|
|
281
|
+
// src/workflow.ts
|
|
282
|
+
// Clamp `text` to at most `max` characters; when it is longer, the tail
// is replaced with a single ellipsis so the result is exactly `max` long.
function truncate(text, max) {
  return text.length <= max ? text : `${text.slice(0, max - 1)}\u2026`;
}
|
|
286
|
+
// Derive a short human-readable title for a workflow node from its
// type-specific config, falling back to a generic per-type label and,
// for unknown types, to the raw nodeType string.
function getNodeTitle(node) {
  const cfg = node.configJson;
  const type = node.nodeType;
  if (type === "login") return cfg.credentialName || "Login";
  if (type === "navigate") return cfg.pageTitle || truncate(cfg.pageUrl || "Navigate", 40);
  if (type === "action") return cfg.customActionTitle || truncate(cfg.actionDescription || "Action", 50);
  if (type === "assertion") return truncate(cfg.condition || "Assertion", 50);
  if (type === "setup") return cfg.setupFlowName || "Setup";
  if (type === "seed") return cfg.seedWorkflowName || "Seed";
  if (type === "wait") return "Wait";
  if (type === "condition") return truncate(cfg.conditionDescription || "Condition", 50);
  if (type === "end") return "End";
  if (type === "api_sequence") return "API Sequence";
  return type;
}
|
|
313
|
+
// Reduce a node's raw configJson to the subset of keys worth displaying,
// keyed by node type. Types without a curated key list (wait, condition,
// api_sequence, unknown) pass their config through untouched.
function getCleanConfig(node) {
  const cfg = node.configJson;
  // Navigate is special-cased: openInNewTab is only shown when truthy.
  if (node.nodeType === "navigate") {
    const base = pick(cfg, ["pageUrl", "pageTitle"]);
    return cfg.openInNewTab ? { ...base, openInNewTab: true } : base;
  }
  const keysByType = {
    login: ["credentialName", "performLogin"],
    action: [
      "actionDescription",
      "customActionInstructions",
      "playbookSteps",
      "inputValues",
      "declaredOutputs"
    ],
    assertion: ["condition", "expectedOutcome", "strict", "alwaysUseAgent"],
    setup: ["setupFlowId", "setupFlowName"],
    seed: ["seedWorkflowId", "seedWorkflowName", "snapshotVersion"],
    end: ["outcome"]
  };
  const keys = keysByType[node.nodeType];
  return keys ? pick(cfg, keys) : cfg;
}
|
|
347
|
+
// Copy the listed keys from `obj` into a new object, skipping any entry
// whose value is null or undefined.
function pick(obj, keys) {
  const out = {};
  keys.forEach((key) => {
    const value = obj[key];
    if (value != null) {
      out[key] = value;
    }
  });
  return out;
}
|
|
356
|
+
// Render a workflow as human-readable markdown: a title and metadata
// header, then one "## Step N" section per node in orderIndex order.
// Sections are joined with blank lines; lines within a step are joined
// with single newlines.
function formatWorkflowMarkdown(workflow) {
  const sections = [];
  sections.push(`# ${workflow.name}`);
  const meta = [`**ID:** ${workflow.id}`, `**Status:** ${workflow.status}`, `**Type:** ${workflow.flowType}`];
  sections.push(meta.join(" | "));
  if (workflow.description) {
    sections.push(`**Description:** ${workflow.description}`);
  }
  sections.push("---");
  // Copy before sorting: Array#sort mutates, and workflow.nodes is caller-owned.
  const sorted = [...workflow.nodes].sort((a, b) => a.orderIndex - b.orderIndex);
  for (let i = 0; i < sorted.length; i++) {
    const node = sorted[i];
    const title = getNodeTitle(node);
    const stepLines = [];
    stepLines.push(`## Step ${i + 1}: ${title}`);
    stepLines.push(`**Type:** ${node.nodeType}`);
    const c = node.configJson;
    // Per-type detail lines; only fields actually present are emitted.
    switch (node.nodeType) {
      case "login": {
        if (c.credentialName) stepLines.push(`**Credential:** ${c.credentialName}`);
        // performLogin is a tri-state (unset/true/false); unset is hidden.
        if (c.performLogin !== void 0) stepLines.push(`**Perform Login:** ${c.performLogin ? "Yes" : "No"}`);
        break;
      }
      case "navigate": {
        if (c.pageUrl) stepLines.push(`**URL:** ${c.pageUrl}`);
        if (c.openInNewTab) stepLines.push(`**Open in New Tab:** Yes`);
        break;
      }
      case "action": {
        // Prefer explicit custom instructions over the short description.
        const instructions = c.customActionInstructions || c.actionDescription;
        if (instructions) stepLines.push(`**Instructions:** ${instructions}`);
        const playbook = c.playbookSteps;
        if (playbook && playbook.length > 0) {
          stepLines.push("");
          stepLines.push("**Playbook:**");
          for (const step of playbook) {
            stepLines.push(`${step.order}. [${step.action}] ${step.target}`);
          }
        }
        break;
      }
      case "assertion": {
        if (c.condition) stepLines.push(`**Condition:** ${c.condition}`);
        if (c.strict) stepLines.push(`**Strict:** Yes`);
        break;
      }
      case "setup": {
        if (c.setupFlowName) stepLines.push(`**Setup Flow:** ${c.setupFlowName}`);
        break;
      }
      case "seed": {
        if (c.seedWorkflowName) stepLines.push(`**Seed Workflow:** ${c.seedWorkflowName}`);
        break;
      }
      case "wait": {
        if (c.waitDuration) stepLines.push(`**Duration:** ${c.waitDuration}`);
        break;
      }
      case "condition": {
        if (c.conditionDescription) stepLines.push(`**Condition:** ${c.conditionDescription}`);
        break;
      }
      case "end": {
        if (c.outcome) stepLines.push(`**Outcome:** ${c.outcome}`);
        break;
      }
    }
    sections.push(stepLines.join("\n"));
  }
  return sections.join("\n\n");
}
|
|
427
|
+
// Build the JSON-serializable view of a workflow: a metadata header plus
// an ordered list of steps (sorted by orderIndex, with a 1-based index).
function formatWorkflowJson(workflow) {
  const { id, name, description, status, flowType } = workflow;
  const ordered = workflow.nodes.slice().sort((a, b) => a.orderIndex - b.orderIndex);
  const steps = ordered.map((node, position) => ({
    index: position + 1,
    nodeId: node.id,
    nodeType: node.nodeType,
    title: getNodeTitle(node),
    config: getCleanConfig(node)
  }));
  return {
    workflow: { id, name, description, status, flowType },
    steps
  };
}
|
|
446
|
+
// Handle `canary workflow list`: query workflows with the optional
// filters and print either raw JSON (--json) or a fixed-width table.
// Exits with code 1 when the API reports an error.
async function handleList(argv, apiUrl, token) {
  const jsonOutput = hasFlag(argv, "--json");
  const params = new URLSearchParams();
  // Map CLI flags onto query-string parameters; unset flags are skipped.
  const filters = [
    ["search", getArgValue(argv, "--search")],
    ["flowType", getArgValue(argv, "--flow-type")],
    ["status", getArgValue(argv, "--status")],
    ["page", getArgValue(argv, "--page")],
    ["pageSize", getArgValue(argv, "--page-size")]
  ];
  for (const [key, value] of filters) {
    if (value) params.set(key, value);
  }
  const qs = params.toString();
  const result = await apiRequest(apiUrl, token, "GET", qs ? `/workflows?${qs}` : "/workflows");
  if (!result.ok) {
    console.error(`Error: ${result.error}`);
    process2.exit(1);
  }
  if (jsonOutput) {
    console.log(JSON.stringify({ data: result.data, pagination: result.pagination }, null, 2));
    return;
  }
  const { data, pagination } = result;
  console.log(`Workflows: ${pagination.totalItems} total (page ${pagination.page}/${pagination.totalPages})\n`);
  if (data.length === 0) {
    console.log("No workflows found.");
    return;
  }
  // Fixed column widths; names are truncated to fit their column.
  const widths = { id: 36, name: 30, status: 12, type: 10 };
  const renderRow = (id, name, status, type) =>
    [
      id.padEnd(widths.id),
      name.padEnd(widths.name),
      status.padEnd(widths.status),
      type.padEnd(widths.type)
    ].join(" ");
  console.log(renderRow("ID", "Name", "Status", "Type"));
  for (const w of data) {
    console.log(renderRow(w.id, truncate(w.name, widths.name), w.status, w.flowType));
  }
}
|
|
498
|
+
// Handle `canary workflow get <id>`: fetch one workflow and print it as
// formatted JSON (the default) or markdown; --output writes to a file
// instead of stdout. Exits with code 1 on missing ID, auth failure, or
// any API error.
async function handleGet(argv, apiUrl, token) {
  // First positional argument is the workflow ID; reject flags in that slot.
  const workflowId = argv[0];
  if (!workflowId || workflowId.startsWith("--")) {
    console.error("Error: Missing workflow ID.");
    console.error("Usage: canary workflow get <workflowId>");
    process2.exit(1);
  }
  // JSON is the default output, so only the markdown selection matters.
  // (Fix: the previously computed --json/--format=json flag was dead
  // code — it was never read — and has been removed.)
  const markdownOutput = hasFlag(argv, "--markdown") || getArgValue(argv, "--format") === "markdown";
  const outputFile = getArgValue(argv, "--output");
  const res = await fetch(`${apiUrl}/workflows/${workflowId}`, {
    headers: { Authorization: `Bearer ${token}`, "Content-Type": "application/json" }
  });
  if (res.status === 401) {
    console.error("Error: Unauthorized. Your session may have expired.");
    console.error("Run: canary login");
    process2.exit(1);
  }
  const result = await res.json();
  // Structured error codes returned by the API.
  if (result.error === "WORKFLOW_NOT_FOUND") {
    console.error("Error: Workflow not found.");
    process2.exit(1);
  }
  if (result.error === "WORKFLOW_WRONG_ORG") {
    console.error(`Error: Workflow belongs to another org: ${result.targetOrgName ?? result.targetOrgId}`);
    process2.exit(1);
  }
  if (!result.workflow) {
    console.error(`Error: ${result.error ?? "Unexpected response"}`);
    process2.exit(1);
  }
  const workflow = result.workflow;
  let output;
  if (markdownOutput) {
    output = formatWorkflowMarkdown(workflow);
  } else {
    output = JSON.stringify(formatWorkflowJson(workflow), null, 2);
  }
  if (outputFile) {
    try {
      await fs2.writeFile(outputFile, output, "utf-8");
      console.log(`Written to ${outputFile}`);
    } catch (err) {
      console.error(`Error writing to ${outputFile}: ${err instanceof Error ? err.message : String(err)}`);
      process2.exit(1);
    }
    return;
  }
  console.log(output);
}
|
|
548
|
+
// Print the usage/help text for `canary workflow` and all of its
// sub-commands (list, get, create) to stdout.
function printWorkflowHelp() {
  console.log(
    [
      "Usage: canary workflow <sub-command> [options]",
      "",
      "Sub-commands:",
      " list [options] List workflows",
      " get <workflowId> [options] Get workflow definition with steps",
      " create [options] Create workflow from JSON (stdin or file)",
      "",
      "List options:",
      " --search <query> Search by name",
      " --flow-type <type> Filter: standard, setup, seed, teardown",
      " --status <status> Filter: draft, published, archived",
      " --page <n> Page number (default: 1)",
      " --page-size <n> Page size (default: 25)",
      " --json Output raw JSON",
      "",
      "Get options:",
      " --format json|markdown Output format (default: json)",
      " --json Shorthand for --format json",
      " --markdown Shorthand for --format markdown",
      " --output <file> Write output to file",
      "",
      "Create options:",
      " --from-stdin Read workflow JSON from stdin (default)",
      " --from-file <path> Read workflow JSON from file",
      " --quarantine Create with quarantined status (auto-promotes after 3 passes)",
      " --run-and-publish Create as draft, run test, publish on success",
      " --property <name|id> Property for run-and-publish",
      " --environment <name|id> Environment for run-and-publish",
      "",
      "Common options:",
      " --env <env> Target environment (prod, dev, local)",
      " --api-url <url> API URL override",
      " --token <key> API token override"
    ].join("\n")
  );
}
|
|
587
|
+
// Entry point for `canary workflow`. Prints help when no sub-command (or
// an explicit help request) is given, otherwise resolves the API config
// and dispatches to the matching handler. Unknown sub-commands print the
// help text and exit with code 1.
async function runWorkflow(argv) {
  const [subCommand, ...rest] = argv;
  const wantsHelp = !subCommand || subCommand === "help" || hasFlag(argv, "--help", "-h");
  if (wantsHelp) {
    printWorkflowHelp();
    return;
  }
  const { apiUrl, token } = await resolveConfig(argv);
  const handlers = {
    list: handleList,
    get: handleGet,
    create: handleWorkflowCreate
  };
  // Object.hasOwn guards against prototype keys like "constructor".
  const handler = Object.hasOwn(handlers, subCommand) ? handlers[subCommand] : void 0;
  if (!handler) {
    console.error(`Unknown sub-command: ${subCommand}`);
    printWorkflowHelp();
    process2.exit(1);
    return;
  }
  await handler(rest, apiUrl, token);
}
|
|
610
|
+
export {
|
|
611
|
+
runWorkflow
|
|
612
|
+
};
|
|
613
|
+
//# sourceMappingURL=workflow-HXIUXRFI.js.map
|