chainlesschain 0.51.0 → 0.66.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/assets/web-panel/.build-hash +1 -1
- package/src/assets/web-panel/assets/{AppLayout-Rvi759IS.js → AppLayout-6SPt_8Y_.js} +1 -1
- package/src/assets/web-panel/assets/{Dashboard-DBhFxXYQ.js → Dashboard-Br7kCwKJ.js} +2 -2
- package/src/assets/web-panel/assets/Dashboard-CKeMmCoT.css +1 -0
- package/src/assets/web-panel/assets/{index-uL0cZ8N_.js → index-tN-8TosE.js} +2 -2
- package/src/assets/web-panel/index.html +2 -2
- package/src/commands/agent-network.js +785 -0
- package/src/commands/automation.js +654 -0
- package/src/commands/dao.js +565 -0
- package/src/commands/did-v2.js +620 -0
- package/src/commands/economy.js +578 -0
- package/src/commands/evolution.js +391 -0
- package/src/commands/hmemory.js +442 -0
- package/src/commands/perf.js +433 -0
- package/src/commands/pipeline.js +449 -0
- package/src/commands/plugin-ecosystem.js +517 -0
- package/src/commands/sandbox.js +401 -0
- package/src/commands/social.js +311 -0
- package/src/commands/sso.js +798 -0
- package/src/commands/workflow.js +320 -0
- package/src/commands/zkp.js +227 -1
- package/src/index.js +21 -0
- package/src/lib/agent-economy.js +479 -0
- package/src/lib/agent-network.js +1121 -0
- package/src/lib/automation-engine.js +948 -0
- package/src/lib/dao-governance.js +569 -0
- package/src/lib/did-v2-manager.js +1127 -0
- package/src/lib/evolution-system.js +453 -0
- package/src/lib/hierarchical-memory.js +481 -0
- package/src/lib/perf-tuning.js +734 -0
- package/src/lib/pipeline-orchestrator.js +928 -0
- package/src/lib/plugin-ecosystem.js +1109 -0
- package/src/lib/sandbox-v2.js +306 -0
- package/src/lib/social-graph-analytics.js +707 -0
- package/src/lib/sso-manager.js +841 -0
- package/src/lib/workflow-engine.js +454 -1
- package/src/lib/zkp-engine.js +249 -20
- package/src/assets/web-panel/assets/Dashboard-BS-tzGNj.css +0 -1
|
@@ -0,0 +1,654 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Automation Engine commands — Phase 96 workflow automation engine CLI.
|
|
3
|
+
* `cc automation ...` (alias `cc auto`).
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import chalk from "chalk";
|
|
7
|
+
import { logger } from "../lib/logger.js";
|
|
8
|
+
import { bootstrap, shutdown } from "../runtime/bootstrap.js";
|
|
9
|
+
import {
|
|
10
|
+
ensureAutomationTables,
|
|
11
|
+
listConnectors,
|
|
12
|
+
getConnector,
|
|
13
|
+
listFlowTemplates,
|
|
14
|
+
getFlowTemplate,
|
|
15
|
+
createFlow,
|
|
16
|
+
getFlow,
|
|
17
|
+
listFlows,
|
|
18
|
+
updateFlowStatus,
|
|
19
|
+
deleteFlow,
|
|
20
|
+
scheduleFlow,
|
|
21
|
+
shareFlow,
|
|
22
|
+
importTemplate,
|
|
23
|
+
addTrigger,
|
|
24
|
+
listTriggers,
|
|
25
|
+
getTrigger,
|
|
26
|
+
setTriggerEnabled,
|
|
27
|
+
executeFlow,
|
|
28
|
+
fireTrigger,
|
|
29
|
+
getExecution,
|
|
30
|
+
listExecutions,
|
|
31
|
+
getStats,
|
|
32
|
+
getConfig,
|
|
33
|
+
FLOW_STATUS,
|
|
34
|
+
EXECUTION_STATUS,
|
|
35
|
+
TRIGGER_TYPE,
|
|
36
|
+
} from "../lib/automation-engine.js";
|
|
37
|
+
|
|
38
|
+
/**
 * Resolve the database handle stashed on the root command by its preAction
 * hook. Walks up two levels (subcommand → automation/auto root), falling
 * back to the immediate parent when no grandparent exists.
 * @param {object} cmd - Commander command instance passed to an action.
 * @returns {object|undefined} The database handle, or undefined when absent.
 */
function _dbFromCtx(cmd) {
  const grandparent = cmd?.parent?.parent;
  const owner = grandparent ?? cmd?.parent;
  return owner?._db;
}
|
|
42
|
+
|
|
43
|
+
/**
 * Boot the runtime and return a ready-to-use automation database.
 * Reads the global --verbose flag off the root command, bootstraps the
 * application context, and exits the process when no database is available.
 * @param {object} cmd - Commander command received by the preAction hook.
 * @returns {Promise<object>} Database handle with automation tables ensured.
 */
async function _prepare(cmd) {
  const rootOpts = cmd?.parent?.parent?.opts?.();
  const context = await bootstrap({ verbose: rootOpts?.verbose });
  if (!context.db) {
    logger.error("Database not available");
    process.exit(1);
  }
  const handle = context.db.getDatabase();
  ensureAutomationTables(handle);
  return handle;
}
|
|
54
|
+
|
|
55
|
+
/**
 * Parse an optional CLI option value as JSON.
 * @param {string|undefined} value - Raw option string (may be empty/undefined).
 * @param {string} label - Option name used in the error message (e.g. "--nodes").
 * @returns {*} Parsed JSON value, or undefined when no value was supplied.
 * @throws {Error} When the value is present but not valid JSON; the original
 *   SyntaxError is preserved as `cause` so callers can inspect the details.
 */
function _parseJsonArg(value, label) {
  if (!value) return undefined;
  try {
    return JSON.parse(value);
  } catch (err) {
    // Keep the user-facing message stable, but don't discard the parser's
    // diagnostic — attach it for debugging.
    throw new Error(`Invalid JSON for ${label}`, { cause: err });
  }
}
|
|
63
|
+
|
|
64
|
+
/**
 * Register the `automation` command and its `auto` alias on the program.
 * Each root command bootstraps the database in a preAction hook and stores
 * the handle on itself; subcommand actions read it back via _dbFromCtx.
 * @param {object} program - Commander program to attach the commands to.
 */
export function registerAutomationCommand(program) {
  const build = (name) =>
    program
      .command(name)
      .description(
        "Workflow automation engine — 12 SaaS connectors + triggers (Phase 96)",
      )
      .hook("preAction", async (thisCommand) => {
        thisCommand._db = await _prepare(thisCommand);
      });

  const roots = [build("automation"), build("auto")];
  for (const root of roots) {
    _wire(root);
  }
}
|
|
85
|
+
|
|
86
|
+
/**
 * Wire all automation subcommands onto one root command (`automation`/`auto`).
 * Every DB-touching action reads the handle the root's preAction hook stashed
 * (via _dbFromCtx) and releases the runtime with shutdown() in its finally.
 * NOTE(review): `process.exit(1)` inside the try blocks terminates the process
 * immediately, so the `finally { await shutdown(); }` does not run on those
 * error paths — confirm this is intended.
 * @param {object} root - Commander command to attach subcommands to.
 */
function _wire(root) {
  // ─── Catalog ──────────────────────────────────────────────
  // Read-only listings over in-memory registries; no DB access needed.

  root
    .command("connectors")
    .description("List 12 built-in SaaS connectors")
    .option("--json", "Output as JSON")
    .action((options) => {
      const list = listConnectors();
      if (options.json) {
        console.log(JSON.stringify(list, null, 2));
        return;
      }
      logger.info(`${list.length} connectors`);
      for (const c of list) {
        logger.log(
          `  ${chalk.cyan(c.id.padEnd(12))} ${chalk.dim(c.category.padEnd(10))} ${c.actions.join(", ")}`,
        );
      }
    });

  root
    .command("trigger-types")
    .description("List trigger types")
    .option("--json", "Output as JSON")
    .action((options) => {
      const types = Object.values(TRIGGER_TYPE);
      if (options.json) {
        console.log(JSON.stringify(types, null, 2));
        return;
      }
      for (const t of types) logger.log(`  ${chalk.cyan(t)}`);
    });

  root
    .command("statuses")
    .description("List flow and execution statuses")
    .action(() => {
      logger.log(chalk.bold("Flow statuses:"));
      for (const s of Object.values(FLOW_STATUS)) logger.log(`  ${s}`);
      logger.log(chalk.bold("Execution statuses:"));
      for (const s of Object.values(EXECUTION_STATUS)) logger.log(`  ${s}`);
    });

  root
    .command("config")
    .description("Show automation engine config")
    .option("--json", "Output as JSON")
    .action((options) => {
      const cfg = getConfig();
      if (options.json) {
        console.log(JSON.stringify(cfg, null, 2));
        return;
      }
      logger.info(`${cfg.connectors} connectors, ${cfg.templates} templates`);
      logger.log(`  flow-statuses: ${cfg.flowStatuses.join(", ")}`);
      logger.log(`  exec-statuses: ${cfg.executionStatuses.join(", ")}`);
      logger.log(`  trigger-types: ${cfg.triggerTypes.join(", ")}`);
      logger.log(`  node-types: ${cfg.nodeTypes.join(", ")}`);
    });

  // ─── Flow CRUD ────────────────────────────────────────────
  // All actions below use the bootstrapped DB and shut down when done.

  root
    .command("create")
    .description("Create a flow")
    .requiredOption("-n, --name <name>", "Flow name")
    .option("-d, --description <text>", "Flow description")
    .option(
      "-N, --nodes <json>",
      'Nodes array as JSON (e.g. \'[{"id":"n1","type":"action","connector":"slack","action":"postMessage"}]\')',
    )
    .option("-E, --edges <json>", "Edges array as JSON")
    .option("-u, --created-by <userId>", "Creator user ID")
    .option("-s, --schedule <cron>", "Cron expression")
    .action(async (opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        // Missing --nodes/--edges default to empty arrays; invalid JSON throws.
        const flow = createFlow(db, {
          name: opts.name,
          description: opts.description,
          nodes: _parseJsonArg(opts.nodes, "--nodes") || [],
          edges: _parseJsonArg(opts.edges, "--edges") || [],
          createdBy: opts.createdBy,
          schedule: opts.schedule,
        });
        logger.success(`Flow created: ${chalk.cyan(flow.id)}`);
        logger.log(`  name: ${flow.name}`);
        logger.log(`  status: ${flow.status}`);
        logger.log(`  nodes: ${flow.nodes.length}`);
        logger.log(`  edges: ${flow.edges.length}`);
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  root
    .command("flows")
    .description("List flows")
    .option("-s, --status <status>", "Filter by status")
    .option("-l, --limit <n>", "Limit", "50")
    .option("--json", "Output as JSON")
    .action(async (opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        const rows = listFlows(db, {
          status: opts.status,
          limit: parseInt(opts.limit, 10),
        });
        if (opts.json) {
          console.log(JSON.stringify(rows, null, 2));
          return;
        }
        if (rows.length === 0) {
          logger.info("No flows");
          return;
        }
        logger.info(`${rows.length} flows`);
        for (const f of rows) {
          logger.log(
            `  ${chalk.cyan(f.id)} ${chalk.dim(f.status.padEnd(8))} ${f.name} (${f.nodes.length}n/${f.edges.length}e)`,
          );
        }
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  root
    .command("show <flowId>")
    .description("Show flow detail")
    .option("--json", "Output as JSON")
    .action(async (flowId, opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        const flow = getFlow(db, flowId);
        if (!flow) {
          logger.error(`Flow not found: ${flowId}`);
          process.exit(1);
        }
        if (opts.json) {
          console.log(JSON.stringify(flow, null, 2));
          return;
        }
        logger.log(`${chalk.bold("ID:")} ${flow.id}`);
        logger.log(`${chalk.bold("Name:")} ${flow.name}`);
        logger.log(`${chalk.bold("Status:")} ${flow.status}`);
        logger.log(`${chalk.bold("Schedule:")} ${flow.schedule || "—"}`);
        logger.log(`${chalk.bold("Nodes:")} ${flow.nodes.length}`);
        for (const n of flow.nodes) {
          // Node label: "<id> <type> <connector>.<action>" with sensible fallbacks.
          logger.log(
            `  - ${chalk.cyan(n.id)} ${n.type || "action"} ${n.connector || ""}${n.action ? "." + n.action : ""}`,
          );
        }
        logger.log(`${chalk.bold("Edges:")} ${flow.edges.length}`);
        for (const e of flow.edges) {
          logger.log(`  - ${e.from} → ${e.to}`);
        }
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  // Status transitions: thin wrappers over updateFlowStatus with a fixed target.
  root
    .command("activate <flowId>")
    .description("Set flow status = active")
    .action(async (flowId, _opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        const flow = updateFlowStatus(db, flowId, FLOW_STATUS.ACTIVE);
        logger.success(`Flow ${flow.id} → active`);
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  root
    .command("pause <flowId>")
    .description("Set flow status = paused")
    .action(async (flowId, _opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        const flow = updateFlowStatus(db, flowId, FLOW_STATUS.PAUSED);
        logger.success(`Flow ${flow.id} → paused`);
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  root
    .command("archive <flowId>")
    .description("Set flow status = archived")
    .action(async (flowId, _opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        const flow = updateFlowStatus(db, flowId, FLOW_STATUS.ARCHIVED);
        logger.success(`Flow ${flow.id} → archived`);
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  root
    .command("delete <flowId>")
    .description("Delete a flow (cascades triggers + executions)")
    .action(async (flowId, _opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        deleteFlow(db, flowId);
        logger.success(`Flow deleted: ${flowId}`);
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  root
    .command("schedule <flowId>")
    .description("Set flow cron schedule")
    .requiredOption("-c, --cron <expr>", "Cron expression")
    .action(async (flowId, opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        const flow = scheduleFlow(db, flowId, opts.cron);
        logger.success(`Flow ${flow.id} scheduled: ${flow.schedule}`);
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  root
    .command("share <flowId>")
    .description("Record flow share to another org")
    .requiredOption("-o, --org <targetOrg>", "Target org ID")
    .action(async (flowId, opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        const flow = shareFlow(db, flowId, opts.org);
        logger.success(
          `Shared ${flow.id} with [${flow.sharedWith.join(", ")}]`,
        );
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  // ─── Templates ────────────────────────────────────────────

  root
    .command("templates")
    .description("List built-in flow templates")
    .option("--json", "Output as JSON")
    .action((opts) => {
      const list = listFlowTemplates();
      if (opts.json) {
        console.log(JSON.stringify(list, null, 2));
        return;
      }
      for (const t of list) {
        logger.log(`  ${chalk.cyan(t.id.padEnd(26))} ${chalk.dim(t.name)}`);
        logger.log(`    ${t.description}`);
      }
    });

  root
    .command("import-template <templateId>")
    .description("Import a built-in template as a flow")
    .option("-n, --name <name>", "Override flow name")
    .option("-u, --created-by <userId>", "Creator user ID")
    .action(async (templateId, opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        const flow = importTemplate(db, templateId, {
          name: opts.name,
          createdBy: opts.createdBy,
        });
        logger.success(
          `Imported ${templateId} → ${chalk.cyan(flow.id)} (${flow.name})`,
        );
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  // ─── Triggers ─────────────────────────────────────────────

  root
    .command("add-trigger <flowId>")
    .description("Add a trigger to a flow")
    .requiredOption(
      "-t, --type <type>",
      "Trigger type (webhook|schedule|event|condition|manual)",
    )
    .requiredOption("-c, --config <json>", "Trigger config as JSON")
    .action(async (flowId, opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        const trig = addTrigger(db, flowId, {
          type: opts.type,
          config: _parseJsonArg(opts.config, "--config") || {},
        });
        logger.success(`Trigger added: ${chalk.cyan(trig.id)}`);
        logger.log(`  flow: ${trig.flowId}`);
        logger.log(`  type: ${trig.type}`);
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  root
    .command("triggers [flowId]")
    .description("List triggers (optionally scoped to a flow)")
    .option("--json", "Output as JSON")
    .action(async (flowId, opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        // flowId may be undefined — listTriggers then returns all triggers.
        const rows = listTriggers(db, flowId);
        if (opts.json) {
          console.log(JSON.stringify(rows, null, 2));
          return;
        }
        if (rows.length === 0) {
          logger.info("No triggers");
          return;
        }
        for (const t of rows) {
          logger.log(
            `  ${chalk.cyan(t.id)} ${chalk.dim(t.type.padEnd(10))} flow=${t.flowId} fires=${t.triggerCount} ${t.enabled ? "" : chalk.red("(disabled)")}`,
          );
        }
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  root
    .command("enable-trigger <triggerId>")
    .description("Enable a trigger")
    .action(async (triggerId, _opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        const trig = setTriggerEnabled(db, triggerId, true);
        logger.success(`Trigger ${trig.id} enabled`);
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  root
    .command("disable-trigger <triggerId>")
    .description("Disable a trigger")
    .action(async (triggerId, _opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        const trig = setTriggerEnabled(db, triggerId, false);
        logger.success(`Trigger ${trig.id} disabled`);
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  root
    .command("fire-trigger <triggerId>")
    .description("Simulate trigger firing (executes flow)")
    .option("-i, --input <json>", "Input data as JSON")
    .action(async (triggerId, opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        const input = _parseJsonArg(opts.input, "--input") || {};
        const exec = fireTrigger(db, triggerId, input);
        logger.success(`Fired trigger → exec ${chalk.cyan(exec.id)}`);
        logger.log(`  status: ${exec.status}`);
        logger.log(`  duration: ${exec.durationMs}ms`);
        logger.log(`  steps: ${exec.stepsLog.length}`);
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  // ─── Execution ────────────────────────────────────────────

  root
    .command("execute <flowId>")
    .description("Manually execute a flow (simulated)")
    .option("-i, --input <json>", "Input data as JSON")
    .option("--test", "Test mode (marks execution as test)")
    .action(async (flowId, opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        const input = _parseJsonArg(opts.input, "--input") || {};
        const exec = executeFlow(db, flowId, {
          inputData: input,
          testMode: Boolean(opts.test),
        });
        logger.success(`Execution ${chalk.cyan(exec.id)} → ${exec.status}`);
        logger.log(`  duration: ${exec.durationMs}ms`);
        logger.log(`  steps: ${exec.stepsLog.length}`);
        if (exec.error) logger.log(chalk.red(`  error: ${exec.error}`));
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  root
    .command("exec-show <execId>")
    .description("Show execution detail")
    .option("--json", "Output as JSON")
    .action(async (execId, opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        const exec = getExecution(db, execId);
        if (!exec) {
          logger.error(`Execution not found: ${execId}`);
          process.exit(1);
        }
        if (opts.json) {
          console.log(JSON.stringify(exec, null, 2));
          return;
        }
        logger.log(`${chalk.bold("ID:")} ${exec.id}`);
        logger.log(`${chalk.bold("Flow:")} ${exec.flowId}`);
        logger.log(`${chalk.bold("Trigger:")} ${exec.triggerType}`);
        logger.log(`${chalk.bold("Status:")} ${exec.status}`);
        logger.log(`${chalk.bold("Duration:")} ${exec.durationMs}ms`);
        logger.log(`${chalk.bold("Test:")} ${exec.testMode}`);
        logger.log(chalk.bold("Steps:"));
        for (const s of exec.stepsLog) {
          logger.log(
            `  ${chalk.cyan(s.nodeId)} ${s.nodeType} ${s.connector || ""}${s.action ? "." + s.action : ""} → ${s.status} ${s.durationMs}ms`,
          );
        }
        if (exec.error) logger.log(chalk.red(`Error: ${exec.error}`));
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  root
    .command("logs")
    .description("List executions")
    .option("-f, --flow <flowId>", "Filter by flow ID")
    .option("-s, --status <status>", "Filter by status")
    .option("-l, --limit <n>", "Limit", "50")
    .option("--json", "Output as JSON")
    .action(async (opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        const rows = listExecutions(db, {
          flowId: opts.flow,
          status: opts.status,
          limit: parseInt(opts.limit, 10),
        });
        if (opts.json) {
          console.log(JSON.stringify(rows, null, 2));
          return;
        }
        if (rows.length === 0) {
          logger.info("No executions");
          return;
        }
        for (const e of rows) {
          logger.log(
            `  ${chalk.cyan(e.id)} ${chalk.dim(e.status.padEnd(9))} flow=${e.flowId} ${e.durationMs}ms via ${e.triggerType}`,
          );
        }
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });

  // ─── Stats ────────────────────────────────────────────────

  root
    .command("stats")
    .description("Show automation engine stats")
    .option("--json", "Output as JSON")
    .action(async (opts, cmd) => {
      const db = _dbFromCtx(cmd);
      try {
        const s = getStats(db);
        if (opts.json) {
          console.log(JSON.stringify(s, null, 2));
          return;
        }
        logger.log(chalk.bold("Flows"));
        logger.log(`  total: ${s.flows.total}`);
        for (const [k, v] of Object.entries(s.flows.byStatus)) {
          logger.log(`    ${k.padEnd(10)} ${v}`);
        }
        logger.log(chalk.bold("Executions"));
        logger.log(`  total: ${s.executions.total}`);
        logger.log(
          `  success rate: ${(s.executions.successRate * 100).toFixed(1)}%`,
        );
        logger.log(
          `  avg duration: ${s.executions.avgDurationMs.toFixed(1)}ms`,
        );
        for (const [k, v] of Object.entries(s.executions.byStatus)) {
          logger.log(`    ${k.padEnd(10)} ${v}`);
        }
        logger.log(chalk.bold("Triggers"));
        logger.log(`  total: ${s.triggers.total}`);
        for (const [k, v] of Object.entries(s.triggers.byType)) {
          logger.log(`    ${k.padEnd(10)} ${v}`);
        }
        logger.log(
          chalk.dim(`Connectors: ${s.connectors} Templates: ${s.templates}`),
        );
      } catch (e) {
        logger.error(e.message);
        process.exit(1);
      } finally {
        await shutdown();
      }
    });
}
|