@nocobase/plugin-workflow 1.9.0-beta.8 → 2.0.0-alpha.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +6 -0
- package/dist/client/248e211bb2d99aee.js +10 -0
- package/dist/client/{9e124936e3877c66.js → 27bd65abee87cafa.js} +1 -1
- package/dist/client/RemoveNodeContext.d.ts +11 -0
- package/dist/client/components/TriggerWorkflowSelect.d.ts +10 -0
- package/dist/client/components/index.d.ts +1 -0
- package/dist/client/e7c028a099537ab1.js +10 -0
- package/dist/client/{2a8332e23037d42f.js → f68fbc145c3ddec3.js} +1 -1
- package/dist/client/flows/triggerWorkflows.d.ts +120 -0
- package/dist/client/index.d.ts +1 -0
- package/dist/client/index.js +1 -1
- package/dist/client/nodes/condition.d.ts +0 -3
- package/dist/client/schemas/executions.d.ts +8 -8
- package/dist/client/triggers/index.d.ts +1 -1
- package/dist/common/collections/executions.d.ts +8 -8
- package/dist/common/collections/executions.js +2 -2
- package/dist/common/collections/flow_nodes.d.ts +21 -0
- package/dist/common/collections/flow_nodes.js +6 -0
- package/dist/common/collections/userWorkflowTasks.d.ts +13 -0
- package/dist/common/collections/userWorkflowTasks.js +6 -0
- package/dist/common/collections/workflowCategories.d.ts +21 -0
- package/dist/common/collections/workflowCategories.js +6 -0
- package/dist/common/collections/workflows.d.ts +42 -0
- package/dist/common/collections/workflows.js +6 -0
- package/dist/externalVersion.js +16 -16
- package/dist/locale/zh-CN.json +14 -3
- package/dist/node_modules/cron-parser/package.json +1 -1
- package/dist/node_modules/lru-cache/package.json +1 -1
- package/dist/node_modules/nodejs-snowflake/package.json +1 -1
- package/dist/server/Dispatcher.d.ts +47 -1
- package/dist/server/Dispatcher.js +368 -1
- package/dist/server/Plugin.d.ts +4 -24
- package/dist/server/Plugin.js +16 -316
- package/dist/server/actions/nodes.js +86 -22
- package/dist/server/index.d.ts +2 -1
- package/dist/server/index.js +0 -2
- package/dist/server/triggers/CollectionTrigger.d.ts +1 -1
- package/dist/server/triggers/ScheduleTrigger/DateFieldScheduleTrigger.d.ts +5 -5
- package/dist/server/triggers/ScheduleTrigger/DateFieldScheduleTrigger.js +33 -13
- package/package.json +16 -16
- package/dist/client/3b0762a72796b5f8.js +0 -10
- package/dist/client/48fc0fadf459229d.js +0 -10
package/dist/server/Plugin.js
CHANGED
@@ -40,16 +40,14 @@ __export(Plugin_exports, {
 });
 module.exports = __toCommonJS(Plugin_exports);
 var import_path = __toESM(require("path"));
-var import_crypto = require("crypto");
 var import_nodejs_snowflake = require("nodejs-snowflake");
-var import_sequelize = require("sequelize");
 var import_lru_cache = __toESM(require("lru-cache"));
 var import_database = require("@nocobase/database");
 var import_server = require("@nocobase/server");
 var import_utils = require("@nocobase/utils");
+var import_Dispatcher = __toESM(require("./Dispatcher"));
 var import_Processor = __toESM(require("./Processor"));
 var import_actions = __toESM(require("./actions"));
-var import_constants = require("./constants");
 var import_functions = __toESM(require("./functions"));
 var import_CollectionTrigger = __toESM(require("./triggers/CollectionTrigger"));
 var import_ScheduleTrigger = __toESM(require("./triggers/ScheduleTrigger"));
@@ -61,37 +59,16 @@ var import_DestroyInstruction = __toESM(require("./instructions/DestroyInstructi
 var import_QueryInstruction = __toESM(require("./instructions/QueryInstruction"));
 var import_UpdateInstruction = __toESM(require("./instructions/UpdateInstruction"));
 var import_WorkflowRepository = __toESM(require("./repositories/WorkflowRepository"));
-const WORKER_JOB_WORKFLOW_PROCESS = "workflow:process";
 class PluginWorkflowServer extends import_server.Plugin {
   instructions = new import_utils.Registry();
   triggers = new import_utils.Registry();
   functions = new import_utils.Registry();
   enabledCache = /* @__PURE__ */ new Map();
   snowflake;
-
-  executing = null;
-  pending = [];
-  events = [];
-  eventsCount = 0;
+  dispatcher = new import_Dispatcher.default(this);
   loggerCache;
   meter = null;
   checker = null;
-  onQueueExecution = async (event) => {
-    const ExecutionRepo = this.db.getRepository("executions");
-    const execution = await ExecutionRepo.findOne({
-      filterByTk: event.executionId
-    });
-    if (!execution || execution.dispatched) {
-      this.getLogger("dispatcher").info(
-        `execution (${event.executionId}) from queue not found or not in queueing status, skip`
-      );
-      return;
-    }
-    this.getLogger(execution.workflowId).info(
-      `execution (${execution.id}) received from queue, adding to pending list`
-    );
-    this.run(execution);
-  };
   onBeforeSave = async (instance, { transaction, cycling }) => {
     if (cycling) {
       return;
@@ -161,7 +138,7 @@ class PluginWorkflowServer extends import_server.Plugin {
   // * add all hooks for enabled workflows
   // * add hooks for create/update[enabled]/delete workflow to add/remove specific hooks
   onAfterStart = async () => {
-    this.
+    this.dispatcher.setReady(true);
     const collection = this.db.getCollection("workflows");
     const workflows = await collection.repository.find({
       appends: ["versionStats"]
@@ -182,28 +159,22 @@ class PluginWorkflowServer extends import_server.Plugin {
     }
     this.checker = setInterval(() => {
       this.getLogger("dispatcher").debug(`(cycling) check for queueing executions`);
-      this.dispatch();
+      this.dispatcher.dispatch();
     }, 3e5);
     this.app.on("workflow:dispatch", () => {
       this.app.logger.info("workflow:dispatch");
-      this.dispatch();
+      this.dispatcher.dispatch();
     });
     this.getLogger("dispatcher").info("(starting) check for queueing executions");
-    this.dispatch();
-    this.
+    this.dispatcher.dispatch();
+    this.dispatcher.setReady(true);
   };
   onBeforeStop = async () => {
     this.app.logger.info(`stopping workflow plugin before app (${this.app.name}) shutdown...`);
     for (const workflow of this.enabledCache.values()) {
       this.toggle(workflow, false, { silent: true });
     }
-    this.
-    if (this.events.length) {
-      await this.prepare();
-    }
-    if (this.executing) {
-      await this.executing;
-    }
+    await this.dispatcher.beforeStop();
     if (this.checker) {
       clearInterval(this.checker);
     }
@@ -309,8 +280,8 @@ class PluginWorkflowServer extends import_server.Plugin {
       custom_epoch: pluginRecord == null ? void 0 : pluginRecord.createdAt.getTime()
     });
     this.app.backgroundJobManager.subscribe(`${this.name}.pendingExecution`, {
-      idle: () => this.app.serving(WORKER_JOB_WORKFLOW_PROCESS) &&
-      process: this.onQueueExecution
+      idle: () => this.app.serving(import_Dispatcher.WORKER_JOB_WORKFLOW_PROCESS) && this.dispatcher.idle,
+      process: this.dispatcher.onQueueExecution
     });
   }
   /**
@@ -332,7 +303,7 @@ class PluginWorkflowServer extends import_server.Plugin {
     this.meter = this.app.telemetry.metric.getMeter();
     const counter = this.meter.createObservableGauge("workflow.events.counter");
     counter.addCallback((result) => {
-      result.observe(this.
+      result.observe(this.dispatcher.getEventsCount());
     });
     this.app.acl.registerSnippet({
       name: `pm.${this.name}.workflows`,
@@ -397,295 +368,24 @@ class PluginWorkflowServer extends import_server.Plugin {
     }
   }
   trigger(workflow, context, options = {}) {
-
-    if (!this.ready) {
-      logger.warn(`app is not ready, event of workflow ${workflow.id} will be ignored`);
-      logger.debug(`ignored event data:`, context);
-      return;
-    }
-    if (!options.force && !options.manually && !workflow.enabled) {
-      logger.warn(`workflow ${workflow.id} is not enabled, event will be ignored`);
-      return;
-    }
-    const duplicated = this.events.find(([w, c, { eventKey }]) => {
-      if (eventKey && options.eventKey) {
-        return eventKey === options.eventKey;
-      }
-    });
-    if (duplicated) {
-      logger.warn(`event of workflow ${workflow.id} is duplicated (${options.eventKey}), event will be ignored`);
-      return;
-    }
-    if (context == null) {
-      logger.warn(`workflow ${workflow.id} event data context is null, event will be ignored`);
-      return;
-    }
-    if (options.manually || this.isWorkflowSync(workflow)) {
-      return this.triggerSync(workflow, context, options);
-    }
-    const { transaction, ...rest } = options;
-    this.events.push([workflow, context, rest]);
-    this.eventsCount = this.events.length;
-    logger.info(`new event triggered, now events: ${this.events.length}`);
-    logger.debug(`event data:`, { context });
-    if (this.events.length > 1) {
-      logger.info(`new event is pending to be prepared after previous preparation is finished`);
-      return;
-    }
-    setImmediate(this.prepare);
+    return this.dispatcher.trigger(workflow, context, options);
   }
-  async
-
-    try {
-      execution = await this.createExecution(workflow, context, options);
-    } catch (err) {
-      this.getLogger(workflow.id).error(`creating execution failed: ${err.message}`, err);
-      return null;
-    }
-    try {
-      return this.process(execution, null, options);
-    } catch (err) {
-      this.getLogger(execution.workflowId).error(`execution (${execution.id}) error: ${err.message}`, err);
-    }
-    return null;
-  }
-  async run(execution, job) {
-    while (this.executing) {
-      await this.executing;
-    }
-    this.executing = this.process(execution, job);
-    await this.executing;
-    this.executing = null;
-    this.dispatch();
+  async run(pending) {
+    return this.dispatcher.run(pending);
   }
   async resume(job) {
-
-    if (!execution) {
-      execution = await job.getExecution();
-    }
-    this.getLogger(execution.workflowId).info(
-      `execution (${execution.id}) resuming from job (${job.id}) added to pending list`
-    );
-    this.run(execution, job);
+    return this.dispatcher.resume(job);
   }
   /**
    * Start a deferred execution
    * @experimental
    */
   async start(execution) {
-
-      return;
-    }
-    this.getLogger(execution.workflowId).info(`starting deferred execution (${execution.id})`);
-    this.run(execution);
-  }
-  async validateEvent(workflow, context, options) {
-    const trigger = this.triggers.get(workflow.type);
-    const triggerValid = await trigger.validateEvent(workflow, context, options);
-    if (!triggerValid) {
-      return false;
-    }
-    const { stack } = options;
-    let valid = true;
-    if ((stack == null ? void 0 : stack.length) > 0) {
-      const existed = await workflow.countExecutions({
-        where: {
-          id: stack
-        },
-        transaction: options.transaction
-      });
-      const limitCount = workflow.options.stackLimit || 1;
-      if (existed >= limitCount) {
-        this.getLogger(workflow.id).warn(
-          `workflow ${workflow.id} has already been triggered in stacks executions (${stack}), and max call coont is ${limitCount}, newly triggering will be skipped.`
-        );
-        valid = false;
-      }
-    }
-    return valid;
-  }
-  async createExecution(workflow, context, options) {
-    var _a;
-    const { deferred } = options;
-    const transaction = await this.useDataSourceTransaction("main", options.transaction, true);
-    const sameTransaction = options.transaction === transaction;
-    const valid = await this.validateEvent(workflow, context, { ...options, transaction });
-    if (!valid) {
-      if (!sameTransaction) {
-        await transaction.commit();
-      }
-      (_a = options.onTriggerFail) == null ? void 0 : _a.call(options, workflow, context, options);
-      return Promise.reject(new Error("event is not valid"));
-    }
-    let execution;
-    try {
-      execution = await workflow.createExecution(
-        {
-          context,
-          key: workflow.key,
-          eventKey: options.eventKey ?? (0, import_crypto.randomUUID)(),
-          stack: options.stack,
-          dispatched: deferred ?? false
-        },
-        { transaction }
-      );
-    } catch (err) {
-      if (!sameTransaction) {
-        await transaction.rollback();
-      }
-      throw err;
-    }
-    this.getLogger(workflow.id).info(`execution of workflow ${workflow.id} created as ${execution.id}`);
-    if (!workflow.stats) {
-      workflow.stats = await workflow.getStats({ transaction });
-    }
-    await workflow.stats.increment("executed", { transaction });
-    if (this.db.options.dialect !== "postgres") {
-      await workflow.stats.reload({ transaction });
-    }
-    if (!workflow.versionStats) {
-      workflow.versionStats = await workflow.getVersionStats({ transaction });
-    }
-    await workflow.versionStats.increment("executed", { transaction });
-    if (this.db.options.dialect !== "postgres") {
-      await workflow.versionStats.reload({ transaction });
-    }
-    if (!sameTransaction) {
-      await transaction.commit();
-    }
-    execution.workflow = workflow;
-    return execution;
-  }
-  prepare = async () => {
-    if (this.executing && this.db.options.dialect === "sqlite") {
-      await this.executing;
-    }
-    const event = this.events.shift();
-    this.eventsCount = this.events.length;
-    if (!event) {
-      this.getLogger("dispatcher").info(`events queue is empty, no need to prepare`);
-      return;
-    }
-    const logger = this.getLogger(event[0].id);
-    logger.info(`preparing execution for event`);
-    try {
-      const execution = await this.createExecution(...event);
-      if (!(execution == null ? void 0 : execution.dispatched)) {
-        if (!this.executing && !this.pending.length) {
-          logger.info(`local pending list is empty, adding execution (${execution.id}) to pending list`);
-          this.pending.push([execution]);
-        } else {
-          logger.info(`local pending list is not empty, sending execution (${execution.id}) to queue`);
-          if (this.ready) {
-            this.app.backgroundJobManager.publish(`${this.name}.pendingExecution`, { executionId: execution.id });
-          }
-        }
-      }
-    } catch (error) {
-      logger.error(`failed to create execution:`, { error });
-    }
-    if (this.events.length) {
-      await this.prepare();
-    } else {
-      this.getLogger("dispatcher").info("no more events need to be prepared, dispatching...");
-      if (this.executing) {
-        await this.executing;
-      }
-      this.dispatch();
-    }
-  };
-  dispatch() {
-    if (!this.ready) {
-      this.getLogger("dispatcher").warn(`app is not ready, new dispatching will be ignored`);
-      return;
-    }
-    if (!this.app.serving(WORKER_JOB_WORKFLOW_PROCESS)) {
-      this.getLogger("dispatcher").warn(
-        `${WORKER_JOB_WORKFLOW_PROCESS} is not serving, new dispatching will be ignored`
-      );
-      return;
-    }
-    if (this.executing) {
-      this.getLogger("dispatcher").warn(`workflow executing is not finished, new dispatching will be ignored`);
-      return;
-    }
-    if (this.events.length) {
-      return this.prepare();
-    }
-    this.executing = (async () => {
-      let next = null;
-      if (this.pending.length) {
-        next = this.pending.shift();
-        this.getLogger(next[0].workflowId).info(`pending execution (${next[0].id}) ready to process`);
-      } else {
-        try {
-          await this.db.sequelize.transaction(
-            {
-              isolationLevel: this.db.options.dialect === "sqlite" ? [][0] : import_sequelize.Transaction.ISOLATION_LEVELS.REPEATABLE_READ
-            },
-            async (transaction) => {
-              const execution = await this.db.getRepository("executions").findOne({
-                filter: {
-                  dispatched: false,
-                  "workflow.enabled": true
-                },
-                sort: "id",
-                transaction
-              });
-              if (execution) {
-                this.getLogger(execution.workflowId).info(`execution (${execution.id}) fetched from db`);
-                await execution.update(
-                  {
-                    dispatched: true,
-                    status: import_constants.EXECUTION_STATUS.STARTED
-                  },
-                  { transaction }
-                );
-                execution.workflow = this.enabledCache.get(execution.workflowId);
-                next = [execution];
-              } else {
-                this.getLogger("dispatcher").debug(`no execution in db queued to process`);
-              }
-            }
-          );
-        } catch (error) {
-          this.getLogger("dispatcher").error(`fetching execution from db failed: ${error.message}`, { error });
-        }
-      }
-      if (next) {
-        await this.process(...next);
-      }
-      this.executing = null;
-      if (next || this.pending.length) {
-        this.getLogger("dispatcher").debug(`last process finished, will do another dispatch`);
-        this.dispatch();
-      }
-    })();
+    return this.dispatcher.start(execution);
   }
   createProcessor(execution, options = {}) {
     return new import_Processor.default(execution, { ...options, plugin: this });
   }
-  async process(execution, job, options = {}) {
-    var _a, _b;
-    const logger = this.getLogger(execution.workflowId);
-    if (!execution.dispatched) {
-      const transaction = await this.useDataSourceTransaction("main", options.transaction);
-      await execution.update({ dispatched: true, status: import_constants.EXECUTION_STATUS.STARTED }, { transaction });
-      logger.info(`execution (${execution.id}) from pending list updated to started`);
-    }
-    const processor = this.createProcessor(execution, options);
-    logger.info(`execution (${execution.id}) ${job ? "resuming" : "starting"}...`);
-    try {
-      await (job ? processor.resume(job) : processor.start());
-      logger.info(`execution (${execution.id}) finished with status: ${execution.status}`, { execution });
-      if (execution.status && ((_b = (_a = execution.workflow.options) == null ? void 0 : _a.deleteExecutionOnStatus) == null ? void 0 : _b.includes(execution.status))) {
-        await execution.destroy({ transaction: processor.mainTransaction });
-      }
-    } catch (err) {
-      logger.error(`execution (${execution.id}) error: ${err.message}`, err);
-    }
-    return processor;
-  }
   async execute(workflow, values, options = {}) {
     const trigger = this.triggers.get(workflow.type);
     if (!trigger) {
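Note: nearly all of this file's removed lines are the event pipeline (trigger validation, createExecution, prepare, the dispatch loop, and process) moving into the new dist/server/Dispatcher.js (+368 lines). A minimal TypeScript sketch of the surface Plugin.js now delegates to, using only the member names visible in this diff; the parameter and return types are assumptions inferred from the call sites, not the shipped declarations (those live in dist/server/Dispatcher.d.ts):

// Sketch only — names taken from this diff, signatures are inferred assumptions.
export declare const WORKER_JOB_WORKFLOW_PROCESS: string; // moved out of Plugin.js

export interface DispatcherLike {
  readonly idle: boolean;                              // gates the backgroundJobManager subscription
  setReady(ready: boolean): void;                      // flipped in onAfterStart and around shutdown
  trigger(workflow: unknown, context: unknown, options?: object): unknown;
  run(pending: unknown): Promise<unknown>;             // Plugin#run(pending) forwards here
  resume(job: unknown): Promise<unknown>;
  start(execution: unknown): Promise<unknown>;         // deferred executions
  dispatch(): void;                                    // driven by the 5-minute checker and "workflow:dispatch"
  beforeStop(): Promise<void>;                         // drains queued events and in-flight work on shutdown
  onQueueExecution(event: { executionId: number }): Promise<void>;
  getEventsCount(): number;                            // feeds the workflow.events.counter gauge
}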
package/dist/server/actions/nodes.js
CHANGED
@@ -49,12 +49,22 @@ async function create(context, next) {
   const { db } = context;
   const repository = import_actions.utils.getRepositoryFromParams(context);
   const { whitelist, blacklist, updateAssociationValues, values, associatedIndex: workflowId } = context.action.params;
+  const workflowPlugin = context.app.pm.get(import__.default);
   context.body = await db.sequelize.transaction(async (transaction) => {
-    const workflow = await repository.getSourceModel(transaction);
-    workflow.versionStats
+    const workflow = workflowPlugin.enabledCache.get(Number.parseInt(workflowId, 10)) || await repository.getSourceModel(transaction);
+    if (!workflow.versionStats) {
+      workflow.versionStats = await workflow.getVersionStats({ transaction });
+    }
     if (workflow.versionStats.executed > 0) {
       context.throw(400, "Node could not be created in executed workflow");
     }
+    const NODES_LIMIT = process.env.WORKFLOW_NODES_LIMIT ? parseInt(process.env.WORKFLOW_NODES_LIMIT, 10) : null;
+    if (NODES_LIMIT) {
+      const nodesCount = await workflow.countNodes({ transaction });
+      if (nodesCount >= NODES_LIMIT) {
+        context.throw(400, `The number of nodes in a workflow cannot exceed ${NODES_LIMIT}`);
+      }
+    }
     const instance = await repository.create({
       values,
       whitelist,
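Note: the node-count guard reads the new WORKFLOW_NODES_LIMIT environment variable (presumably what the .env.example addition above documents). Restated as a self-contained TypeScript sketch — assertNodeCapacity is a hypothetical helper name; the authoritative logic is the compiled hunk above:

// Unset or empty WORKFLOW_NODES_LIMIT keeps the previous, unlimited behavior.
const NODES_LIMIT: number | null = process.env.WORKFLOW_NODES_LIMIT
  ? Number.parseInt(process.env.WORKFLOW_NODES_LIMIT, 10)
  : null;

async function assertNodeCapacity(workflow: { countNodes(options?: object): Promise<number> }): Promise<void> {
  if (!NODES_LIMIT) return;
  const count = await workflow.countNodes();
  if (count >= NODES_LIMIT) {
    // the action surfaces this to the client as HTTP 400 via context.throw
    throw new Error(`The number of nodes in a workflow cannot exceed ${NODES_LIMIT}`);
  }
}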
@@ -136,38 +146,32 @@ function searchBranchDownstreams(nodes, from) {
   }
   return result;
 }
+function findBranchTail(branchHead) {
+  let tail = branchHead;
+  while (tail.downstream) {
+    tail = tail.downstream;
+  }
+  return tail;
+}
 async function destroy(context, next) {
   const { db } = context;
   const repository = import_actions.utils.getRepositoryFromParams(context);
-  const { filterByTk } = context.action.params;
+  const { filterByTk, keepBranch } = context.action.params;
+  const keepBranchIndex = keepBranch == null || keepBranch === "" ? null : Number.parseInt(keepBranch, 10);
   const fields = ["id", "upstreamId", "downstreamId", "branchIndex"];
   const instance = await repository.findOne({
     filterByTk,
     fields: [...fields, "workflowId"],
     appends: ["upstream", "downstream", "workflow.versionStats.executed"]
   });
+  if (!instance) {
+    context.throw(404, "Node not found");
+  }
   if (instance.workflow.versionStats.executed > 0) {
     context.throw(400, "Nodes in executed workflow could not be deleted");
   }
   await db.sequelize.transaction(async (transaction) => {
     const { upstream, downstream } = instance.get();
-    if (upstream && upstream.downstreamId === instance.id) {
-      await upstream.update(
-        {
-          downstreamId: instance.downstreamId
-        },
-        { transaction }
-      );
-    }
-    if (downstream) {
-      await downstream.update(
-        {
-          upstreamId: instance.upstreamId,
-          branchIndex: instance.branchIndex
-        },
-        { transaction }
-      );
-    }
     const nodes = await repository.find({
       filter: {
         workflowId: instance.workflowId
@@ -187,9 +191,69 @@ async function destroy(context, next) {
         item.downstream = nodesMap.get(item.downstreamId);
       }
     });
-    const
+    const keepBranchHead = keepBranchIndex != null ? nodes.find((item) => item.upstreamId === instance.id && item.branchIndex == keepBranchIndex) : null;
+    if (keepBranchIndex != null && !keepBranchHead) {
+      context.throw(400, `Branch ${keepBranchIndex} not found`);
+    }
+    const keepBranchNodes = keepBranchHead ? searchBranchDownstreams(nodes, keepBranchHead) : [];
+    const keepBranchNodeIds = new Set(keepBranchNodes.map((item) => item.id));
+    const branchNodes = instance ? searchBranchNodes(nodes, instance) : [];
+    const branchNodesToDelete = keepBranchHead ? branchNodes.filter((item) => !keepBranchNodeIds.has(item.id)) : branchNodes;
+    if (keepBranchHead) {
+      if (upstream && upstream.downstreamId === instance.id) {
+        await upstream.update(
+          {
+            downstreamId: keepBranchHead.id
+          },
+          { transaction }
+        );
+      }
+      await keepBranchHead.update(
+        {
+          upstreamId: instance.upstreamId,
+          branchIndex: instance.branchIndex
+        },
+        { transaction }
+      );
+      if (downstream) {
+        const branchTail = findBranchTail(keepBranchHead);
+        await branchTail.update(
+          {
+            downstreamId: instance.downstreamId
+          },
+          { transaction }
+        );
+        branchTail.downstreamId = instance.downstreamId;
+        branchTail.downstream = downstream;
+        await downstream.update(
+          {
+            upstreamId: branchTail.id,
+            branchIndex: null
+          },
+          { transaction }
+        );
+      }
+    } else {
+      if (upstream && upstream.downstreamId === instance.id) {
+        await upstream.update(
+          {
+            downstreamId: instance.downstreamId
+          },
+          { transaction }
+        );
+      }
+      if (downstream) {
+        await downstream.update(
+          {
+            upstreamId: instance.upstreamId,
+            branchIndex: instance.branchIndex
+          },
+          { transaction }
+        );
+      }
+    }
     await repository.destroy({
-      filterByTk: [instance.id, ...
+      filterByTk: [instance.id, ...branchNodesToDelete.map((item) => item.id)],
       transaction
     });
   });
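Note: destroy now accepts a keepBranch parameter — when removing a branch-container node, one of its branches can be spliced into the removed node's place instead of being deleted with it. A sketch of the splice with hypothetical in-memory nodes (the shipped code performs the same relinking as row updates inside the transaction above):

// Example: A -> C -> D, where C holds branch 0 = [X -> Y] and branch 1 = [Z].
// destroy(C, keepBranch: 0) relinks A -> X -> Y -> D and deletes C and Z;
// destroy(C) without keepBranch deletes C, X, Y, Z and relinks A -> D.
type NodeRef = {
  id: number;
  upstreamId: number | null;
  downstreamId: number | null;
  branchIndex: number | null;
};

function spliceKeptBranch(removed: NodeRef, keptHead: NodeRef, keptTail: NodeRef): void {
  keptHead.upstreamId = removed.upstreamId;     // the kept head takes the removed node's slot...
  keptHead.branchIndex = removed.branchIndex;   // ...including its branch position, if any
  keptTail.downstreamId = removed.downstreamId; // the kept tail reconnects to the old downstream
}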
package/dist/server/index.d.ts
CHANGED
@@ -12,6 +12,7 @@ export * from './instructions';
 export * from './functions';
 export * from './logicCalculate';
 export { Trigger } from './triggers';
+export type { EventOptions } from './Dispatcher';
 export { default as Processor } from './Processor';
-export { default
+export { default } from './Plugin';
 export * from './types';
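Note: EventOptions moved from Plugin to Dispatcher and is now re-exported type-only, which is why the runtime re-export disappears from index.js below — a pure type has no value to export at runtime. Downstream imports should be unaffected, assuming the package root resolves to the server entry:

// Unchanged consumer code (assumption: '@nocobase/plugin-workflow' maps to dist/server).
import type { EventOptions } from '@nocobase/plugin-workflow';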
package/dist/server/index.js
CHANGED
@@ -37,7 +37,6 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var server_exports = {};
 __export(server_exports, {
-  EventOptions: () => import_Plugin.EventOptions,
   Processor: () => import_Processor.default,
   Trigger: () => import_triggers.Trigger,
   default: () => import_Plugin.default
@@ -54,7 +53,6 @@ var import_Plugin = __toESM(require("./Plugin"));
 __reExport(server_exports, require("./types"), module.exports);
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
-  EventOptions,
   Processor,
   Trigger,
   ...require("./utils"),
package/dist/server/triggers/CollectionTrigger.d.ts
CHANGED
@@ -9,7 +9,7 @@
 import { Model } from '@nocobase/database';
 import Trigger from '.';
 import type { WorkflowModel } from '../types';
-import type { EventOptions } from '../
+import type { EventOptions } from '../Dispatcher';
 export interface CollectionChangeTriggerConfig {
   collection: string;
   mode: number;
package/dist/server/triggers/ScheduleTrigger/DateFieldScheduleTrigger.d.ts
CHANGED
@@ -6,7 +6,7 @@
  * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
  * For more information, please refer to: https://www.nocobase.com/agreement.
  */
-import { Transactionable } from '@nocobase/database';
+import { Model, Transactionable } from '@nocobase/database';
 import type Plugin from '../../Plugin';
 import type { WorkflowModel } from '../../types';
 export type ScheduleOnField = {
@@ -32,10 +32,10 @@ export default class DateFieldScheduleTrigger {
     constructor(workflow: Plugin);
     reload(): void;
     inspect(workflow: WorkflowModel): Promise<void>;
-    loadRecordsToSchedule({ id, config: { collection, limit, startsOn, repeat, endsOn }, stats }: WorkflowModel, currentDate: Date): Promise<
-    getRecordNextTime(workflow: WorkflowModel, record:
-    schedule(workflow: WorkflowModel, record:
-    trigger(workflow: WorkflowModel, record:
+    loadRecordsToSchedule({ id, config: { collection, limit, startsOn, repeat, endsOn }, stats }: WorkflowModel, currentDate: Date): Promise<Model<any, any>[]>;
+    getRecordNextTime(workflow: WorkflowModel, record: Model, nextSecond?: boolean): any;
+    schedule(workflow: WorkflowModel, record: Model, nextTime: number, toggle?: boolean, options?: {}): Promise<void>;
+    trigger(workflow: WorkflowModel, record: Model, nextTime: number, { transaction }?: Transactionable): Promise<void>;
     on(workflow: WorkflowModel): void;
     off(workflow: WorkflowModel): void;
     execute(workflow: any, values: any, options: any): Promise<void | import("../..").Processor>;
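Note: the schedule-trigger declarations replace the previously loose record parameters with the concrete Model type from @nocobase/database, so callers now get type checking on the record argument. A minimal consumer sketch; the deep import path and the WorkflowModel re-export location are assumptions:

import type { Model } from '@nocobase/database';
import type { WorkflowModel } from '@nocobase/plugin-workflow';
import type DateFieldScheduleTrigger from '@nocobase/plugin-workflow/dist/server/triggers/ScheduleTrigger/DateFieldScheduleTrigger';

declare const trigger: DateFieldScheduleTrigger;
declare const workflow: WorkflowModel;
declare const record: Model;

// Per the new declarations the return type is still `any` (a next-run timestamp at runtime).
const nextTime = trigger.getRecordNextTime(workflow, record, true);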