@nocobase/plugin-workflow 2.0.0-alpha.9 → 2.1.0-alpha.1
- package/dist/client/{f68fbc145c3ddec3.js → 80d4cd8911e03c27.js} +1 -1
- package/dist/client/Branch.d.ts +2 -0
- package/dist/client/WorkflowTasks.d.ts +3 -1
- package/dist/client/bfc2a351589613e1.js +10 -0
- package/dist/client/e078314a62391f36.js +10 -0
- package/dist/client/flows/triggerWorkflows.d.ts +15 -42
- package/dist/client/index.js +1 -1
- package/dist/client/nodes/multi-conditions.d.ts +57 -0
- package/dist/client/schemas/executions.d.ts +1 -1
- package/dist/client/variable.d.ts +1 -1
- package/dist/common/collections/executions.d.ts +1 -1
- package/dist/common/collections/executions.js +13 -1
- package/dist/common/collections/jobs.js +4 -0
- package/dist/externalVersion.js +12 -11
- package/dist/locale/de-DE.json +230 -188
- package/dist/locale/en-US.json +233 -188
- package/dist/locale/es-ES.json +238 -78
- package/dist/locale/fr-FR.json +238 -78
- package/dist/locale/hu-HU.json +258 -0
- package/dist/locale/id-ID.json +258 -0
- package/dist/locale/it-IT.json +229 -176
- package/dist/locale/ja-JP.json +236 -164
- package/dist/locale/ko-KR.json +260 -150
- package/dist/locale/nl-NL.json +257 -99
- package/dist/locale/pt-BR.json +238 -78
- package/dist/locale/ru-RU.json +243 -67
- package/dist/locale/tr-TR.json +239 -63
- package/dist/locale/uk-UA.json +258 -0
- package/dist/locale/vi-VN.json +258 -0
- package/dist/locale/zh-CN.json +236 -237
- package/dist/locale/zh-TW.json +258 -0
- package/dist/node_modules/cron-parser/package.json +1 -1
- package/dist/node_modules/lru-cache/package.json +1 -1
- package/dist/node_modules/nodejs-snowflake/package.json +1 -1
- package/dist/server/Dispatcher.d.ts +1 -3
- package/dist/server/Dispatcher.js +30 -30
- package/dist/server/Plugin.d.ts +3 -0
- package/dist/server/Plugin.js +55 -20
- package/dist/server/Processor.js +37 -12
- package/dist/server/actions/index.js +3 -0
- package/dist/server/actions/jobs.d.ts +9 -0
- package/dist/server/actions/jobs.js +64 -0
- package/dist/server/actions/nodes.d.ts +1 -0
- package/dist/server/actions/nodes.js +78 -1
- package/dist/server/instructions/MultiConditionsInstruction.d.ts +18 -0
- package/dist/server/instructions/MultiConditionsInstruction.js +118 -0
- package/dist/server/instructions/index.d.ts +3 -1
- package/dist/server/repositories/WorkflowRepository.js +9 -2
- package/dist/server/triggers/CollectionTrigger.d.ts +3 -0
- package/dist/server/triggers/CollectionTrigger.js +31 -3
- package/dist/server/triggers/ScheduleTrigger/DateFieldScheduleTrigger.js +8 -0
- package/dist/server/triggers/index.d.ts +3 -1
- package/dist/server/types/Job.d.ts +1 -0
- package/package.json +5 -2
- package/dist/client/248e211bb2d99aee.js +0 -10
- package/dist/client/e7c028a099537ab1.js +0 -10
package/dist/server/Plugin.js
CHANGED
@@ -36,6 +36,7 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var Plugin_exports = {};
 __export(Plugin_exports, {
+  WORKER_JOB_WORKFLOW_PROCESS: () => WORKER_JOB_WORKFLOW_PROCESS,
   default: () => PluginWorkflowServer
 });
 module.exports = __toCommonJS(Plugin_exports);
@@ -58,7 +59,9 @@ var import_CreateInstruction = __toESM(require("./instructions/CreateInstruction
 var import_DestroyInstruction = __toESM(require("./instructions/DestroyInstruction"));
 var import_QueryInstruction = __toESM(require("./instructions/QueryInstruction"));
 var import_UpdateInstruction = __toESM(require("./instructions/UpdateInstruction"));
+var import_MultiConditionsInstruction = __toESM(require("./instructions/MultiConditionsInstruction"));
 var import_WorkflowRepository = __toESM(require("./repositories/WorkflowRepository"));
+const WORKER_JOB_WORKFLOW_PROCESS = "workflow:process";
 class PluginWorkflowServer extends import_server.Plugin {
   instructions = new import_utils.Registry();
   triggers = new import_utils.Registry();
@@ -66,6 +69,9 @@ class PluginWorkflowServer extends import_server.Plugin {
   enabledCache = /* @__PURE__ */ new Map();
   snowflake;
   dispatcher = new import_Dispatcher.default(this);
+  get channelPendingExecution() {
+    return `${this.name}.pendingExecution`;
+  }
   loggerCache;
   meter = null;
   checker = null;
@@ -138,7 +144,6 @@ class PluginWorkflowServer extends import_server.Plugin {
   // * add all hooks for enabled workflows
   // * add hooks for create/update[enabled]/delete workflow to add/remove specific hooks
   onAfterStart = async () => {
-    this.dispatcher.setReady(true);
     const collection = this.db.getCollection("workflows");
     const workflows = await collection.repository.find({
       appends: ["versionStats"]
@@ -165,19 +170,20 @@
       this.app.logger.info("workflow:dispatch");
       this.dispatcher.dispatch();
     });
+    this.dispatcher.setReady(true);
     this.getLogger("dispatcher").info("(starting) check for queueing executions");
     this.dispatcher.dispatch();
-    this.dispatcher.setReady(true);
   };
   onBeforeStop = async () => {
+    if (this.checker) {
+      clearInterval(this.checker);
+    }
+    await this.dispatcher.beforeStop();
     this.app.logger.info(`stopping workflow plugin before app (${this.app.name}) shutdown...`);
     for (const workflow of this.enabledCache.values()) {
       this.toggle(workflow, false, { silent: true });
     }
-
-    if (this.checker) {
-      clearInterval(this.checker);
-    }
+    this.app.eventQueue.unsubscribe(this.channelPendingExecution);
    this.loggerCache.clear();
  };
  async handleSyncMessage(message) {
@@ -202,13 +208,16 @@
       }
     }
   }
+  serving() {
+    return this.app.serving(WORKER_JOB_WORKFLOW_PROCESS);
+  }
   /**
    * @experimental
    */
   getLogger(workflowId = "dispatcher") {
     const now = /* @__PURE__ */ new Date();
     const date = `${now.getFullYear()}-${`0${now.getMonth() + 1}`.slice(-2)}-${`0${now.getDate()}`.slice(-2)}`;
-    const key = `${date}-${workflowId}
+    const key = `${date}-${workflowId}`;
     if (this.loggerCache.has(key)) {
       return this.loggerCache.get(key);
     }
@@ -259,6 +268,7 @@
   initInstructions(more = {}) {
     this.registerInstruction("calculation", import_CalculationInstruction.default);
     this.registerInstruction("condition", import_ConditionInstruction.default);
+    this.registerInstruction("multi-conditions", import_MultiConditionsInstruction.default);
     this.registerInstruction("end", import_EndInstruction.default);
     this.registerInstruction("create", import_CreateInstruction.default);
     this.registerInstruction("destroy", import_DestroyInstruction.default);
@@ -279,10 +289,6 @@
     this.snowflake = new import_nodejs_snowflake.Snowflake({
       custom_epoch: pluginRecord == null ? void 0 : pluginRecord.createdAt.getTime()
     });
-    this.app.backgroundJobManager.subscribe(`${this.name}.pendingExecution`, {
-      idle: () => this.app.serving(import_Dispatcher.WORKER_JOB_WORKFLOW_PROCESS) && this.dispatcher.idle,
-      process: this.dispatcher.onQueueExecution
-    });
   }
   /**
    * @internal
@@ -293,18 +299,30 @@
     this.initTriggers(options.triggers);
     this.initInstructions(options.instructions);
     (0, import_functions.default)(this, options.functions);
+    this.functions.register("instanceId", () => this.app.instanceId);
+    this.functions.register("epoch", () => 1605024e3);
+    this.functions.register("genSnowflakeId", () => this.app.snowflakeIdGenerator.generate());
     this.loggerCache = new import_lru_cache.default({
       max: 20,
       updateAgeOnGet: true,
       dispose(logger) {
-        logger
+        const cachedLogger = logger;
+        if (!cachedLogger) {
+          return;
+        }
+        cachedLogger.silent = true;
+        if (typeof cachedLogger.close === "function") {
+          cachedLogger.close();
+        }
       }
     });
     this.meter = this.app.telemetry.metric.getMeter();
-
-
-
-
+    if (this.meter) {
+      const counter = this.meter.createObservableGauge("workflow.events.counter");
+      counter.addCallback((result) => {
+        result.observe(this.dispatcher.getEventsCount());
+      });
+    }
     this.app.acl.registerSnippet({
       name: `pm.${this.name}.workflows`,
       actions: [
@@ -316,6 +334,7 @@
       "executions:destroy",
       "flow_nodes:update",
       "flow_nodes:destroy",
+      "flow_nodes:destroyBranch",
       "flow_nodes:test",
       "jobs:get",
       "workflowCategories:*"
@@ -333,12 +352,18 @@
     db.on("workflows.afterDestroy", this.onAfterDestroy);
     this.app.on("afterStart", this.onAfterStart);
     this.app.on("beforeStop", this.onBeforeStop);
+    this.app.eventQueue.subscribe(this.channelPendingExecution, {
+      idle: () => this.serving() && this.dispatcher.idle,
+      process: this.dispatcher.onQueueExecution
+    });
   }
   toggle(workflow, enable, { silent, transaction } = {}) {
     const type = workflow.get("type");
     const trigger = this.triggers.get(type);
     if (!trigger) {
-      this.getLogger(workflow.id).error(`trigger type ${workflow.type} of workflow ${workflow.id} is not implemented`);
+      this.getLogger(workflow.id).error(`trigger type ${workflow.type} of workflow ${workflow.id} is not implemented`, {
+        workflowId: workflow.id
+      });
       return;
     }
     const next = enable ?? workflow.get("enabled");
@@ -346,14 +371,20 @@
       const prev = workflow.previous();
       if (prev.config) {
         trigger.off({ ...workflow.get(), ...prev });
-        this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id} based on configuration before updated`);
+        this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id} based on configuration before updated`, {
+          workflowId: workflow.id
+        });
       }
       trigger.on(workflow);
-      this.getLogger(workflow.id).info(`toggle ON workflow ${workflow.id}`);
+      this.getLogger(workflow.id).info(`toggle ON workflow ${workflow.id}`, {
+        workflowId: workflow.id
+      });
       this.enabledCache.set(workflow.id, workflow);
     } else {
       trigger.off(workflow);
-      this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id}`);
+      this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id}`, {
+        workflowId: workflow.id
+      });
       this.enabledCache.delete(workflow.id);
     }
     if (!silent) {
@@ -453,3 +484,7 @@
     }
   }
 }
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  WORKER_JOB_WORKFLOW_PROCESS
+});
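Note on the queueing change above: pending-execution handling moves from `backgroundJobManager` (subscribed during initialization) to `app.eventQueue` (subscribed alongside the other event listeners, unsubscribed in `onBeforeStop`), keyed by the new `channelPendingExecution` getter and gated by `serving()`. A minimal sketch of the consumer contract implied by the diff — a channel name plus `idle`/`process` handlers; `FakeEventQueue` is a hypothetical stand-in, not NocoBase API:

// Hand a message to a consumer only when it reports itself idle,
// mirroring `idle: () => this.serving() && this.dispatcher.idle` above.
class FakeEventQueue {
  constructor() {
    this.consumers = new Map();
  }
  subscribe(channel, { idle, process }) {
    this.consumers.set(channel, { idle, process });
  }
  unsubscribe(channel) {
    this.consumers.delete(channel);
  }
  async publish(channel, message) {
    const consumer = this.consumers.get(channel);
    if (consumer && consumer.idle()) {
      await consumer.process(message);
    }
  }
}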
package/dist/server/Processor.js
CHANGED
@@ -102,11 +102,15 @@ class Processor {
     });
   }
   makeJobs(jobs) {
-
+    for (const job of jobs) {
       const node = this.nodesMap.get(job.nodeId);
+      if (!node) {
+        this.logger.warn(`node (#${job.nodeId}) not found for job (#${job.id}), this will lead to unexpected error`);
+        continue;
+      }
       this.jobsMapByNodeKey[node.key] = job;
       this.jobResultsMapByNodeKey[node.key] = job.result;
-
+    }
   }
   async prepare() {
     const {
@@ -144,7 +148,9 @@
   async start() {
     const { execution } = this;
     if (execution.status) {
-      this.logger.warn(`execution was ended with status ${execution.status} before, could not be started again`);
+      this.logger.warn(`execution was ended with status ${execution.status} before, could not be started again`, {
+        workflowId: execution.workflowId
+      });
       return;
     }
     await this.prepare();
@@ -158,7 +164,9 @@
   async resume(job) {
     const { execution } = this;
     if (execution.status) {
-      this.logger.warn(`execution was ended with status ${execution.status} before, could not be resumed`);
+      this.logger.warn(`execution was ended with status ${execution.status} before, could not be resumed`, {
+        workflowId: execution.workflowId
+      });
       return;
     }
     await this.prepare();
@@ -168,7 +176,7 @@
   async exec(instruction, node, prevJob) {
     let job;
     try {
-      this.logger.debug(`config of node`, { data: node.config });
+      this.logger.debug(`config of node`, { data: node.config, workflowId: node.workflowId });
       job = await instruction(node, prevJob, this);
       if (job === null) {
         return this.exit();
@@ -179,7 +187,7 @@
     } catch (err) {
       this.logger.error(
         `execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id}) failed: `,
-        err
+        { error: err, workflowId: node.workflowId }
       );
       job = {
         result: err instanceof Error ? {
@@ -199,7 +207,10 @@
     }
     const savedJob = this.saveJob(job);
     this.logger.info(
-      `execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id}) finished as status: ${savedJob.status}`
+      `execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id}) finished as status: ${savedJob.status}`,
+      {
+        workflowId: node.workflowId
+      }
     );
     this.logger.debug(`result of node`, { data: savedJob.result });
     if (savedJob.status === import_constants.JOB_STATUS.RESOLVED && node.downstream) {
@@ -217,7 +228,9 @@
     if (typeof instruction.run !== "function") {
       return Promise.reject(new Error("`run` should be implemented for customized execution of the node"));
     }
-    this.logger.info(`execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id})`);
+    this.logger.info(`execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id})`, {
+      workflowId: node.workflowId
+    });
     return this.exec(instruction.run.bind(instruction), node, input);
   }
   // parent node should take over the control
@@ -225,7 +238,9 @@
     this.logger.debug(`branch ended at node (${node.id})`);
     const parentNode = this.findBranchParentNode(node);
     if (parentNode) {
-      this.logger.debug(`not on main, recall to parent entry node (${node.id})})`);
+      this.logger.debug(`not on main, recall to parent entry node (${node.id})})`, {
+        workflowId: node.workflowId
+      });
       await this.recall(parentNode, job);
       return null;
     }
@@ -242,7 +257,9 @@
         new Error(`"resume" method should be implemented for [${node.type}] instruction of node (#${node.id})`)
       );
     }
-    this.logger.info(`execution (${this.execution.id}) resume instruction [${node.type}] for node (${node.id})`);
+    this.logger.info(`execution (${this.execution.id}) resume instruction [${node.type}] for node (${node.id})`, {
+      workflowId: node.workflowId
+    });
     return this.exec(instruction.resume.bind(instruction), node, job);
   }
   async exit(s) {
@@ -261,6 +278,10 @@
       changes.push([`status`, job.status]);
       job.changed("status", false);
     }
+    if (job.changed("meta")) {
+      changes.push([`meta`, JSON.stringify(job.meta ?? null)]);
+      job.changed("meta", false);
+    }
     if (job.changed("result")) {
       changes.push([`result`, JSON.stringify(job.result ?? null)]);
       job.changed("result", false);
@@ -295,7 +316,9 @@
     if (this.mainTransaction && this.mainTransaction !== this.transaction) {
       await this.mainTransaction.commit();
     }
-    this.logger.info(`execution (${this.execution.id}) exiting with status ${this.execution.status}`);
+    this.logger.info(`execution (${this.execution.id}) exiting with status ${this.execution.status}`, {
+      workflowId: this.execution.workflowId
+    });
     return null;
   }
   /**
@@ -326,7 +349,9 @@
     this.lastSavedJob = job;
     this.jobsMapByNodeKey[job.nodeKey] = job;
     this.jobResultsMapByNodeKey[job.nodeKey] = job.result;
-    this.logger.debug(`job added to save list: ${JSON.stringify(job)}`);
+    this.logger.debug(`job added to save list: ${JSON.stringify(job)}`, {
+      workflowId: this.execution.workflowId
+    });
     return job;
   }
   /**
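Note on the changes above: besides tagging log calls with `workflowId`, `makeJobs` now skips jobs whose node no longer exists instead of crashing on `node.key`, and `meta` joins `status` and `result` in the dirty-field save path. A sketch of that save pattern, assuming a Sequelize-like model where `changed(field)` reads the dirty flag and `changed(field, false)` clears it:

// Collect [field, value] pairs for dirty fields and clear each flag so the
// same change is not persisted twice; JSON fields are serialized as above.
function collectChanges(job) {
  const changes = [];
  if (job.changed("status")) {
    changes.push(["status", job.status]);
    job.changed("status", false);
  }
  for (const field of ["meta", "result"]) {
    if (job.changed(field)) {
      changes.push([field, JSON.stringify(job[field] ?? null)]);
      job.changed(field, false);
    }
  }
  return changes;
}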
package/dist/server/actions/index.js
CHANGED

@@ -41,6 +41,7 @@ __export(actions_exports, {
 module.exports = __toCommonJS(actions_exports);
 var workflows = __toESM(require("./workflows"));
 var nodes = __toESM(require("./nodes"));
+var jobs = __toESM(require("./jobs"));
 var executions = __toESM(require("./executions"));
 var userWorkflowTasks = __toESM(require("./userWorkflowTasks"));
 function make(name, mod) {
@@ -61,8 +62,10 @@ function actions_default({ app }) {
     ...make("flow_nodes", {
       update: nodes.update,
       destroy: nodes.destroy,
+      destroyBranch: nodes.destroyBranch,
       test: nodes.test
     }),
+    ...make("jobs", jobs),
     ...make("executions", executions),
     ...make("userWorkflowTasks", userWorkflowTasks)
   });
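Note: `make(name, mod)` itself is outside this hunk; judging from its call sites, it maps each export of a module to a "<resource>:<action>" handler, so `...make("jobs", jobs)` registers `jobs:resume`. A sketch of that assumed shape:

// Assumed behavior of make(), inferred from its usage only:
// make("jobs", { resume }) => { "jobs:resume": resume }
function make(name, mod) {
  return Object.keys(mod).reduce(
    (result, key) => Object.assign(result, { [`${name}:${key}`]: mod[key] }),
    {}
  );
}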
package/dist/server/actions/jobs.d.ts
ADDED

@@ -0,0 +1,9 @@
+/**
+ * This file is part of the NocoBase (R) project.
+ * Copyright (c) 2020-2024 NocoBase Co., Ltd.
+ * Authors: NocoBase Team.
+ *
+ * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
+ * For more information, please refer to: https://www.nocobase.com/agreement.
+ */
+export declare function resume(context: any, next: any): Promise<any>;
package/dist/server/actions/jobs.js
ADDED

@@ -0,0 +1,64 @@
+/**
+ * This file is part of the NocoBase (R) project.
+ * Copyright (c) 2020-2024 NocoBase Co., Ltd.
+ * Authors: NocoBase Team.
+ *
+ * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
+ * For more information, please refer to: https://www.nocobase.com/agreement.
+ */
+
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var jobs_exports = {};
+__export(jobs_exports, {
+  resume: () => resume
+});
+module.exports = __toCommonJS(jobs_exports);
+var import_actions = require("@nocobase/actions");
+var import_Plugin = __toESM(require("../Plugin"));
+async function resume(context, next) {
+  const repository = import_actions.utils.getRepositoryFromParams(context);
+  const workflowPlugin = context.app.pm.get(import_Plugin.default);
+  const { filterByTk, values = {} } = context.action.params;
+  const job = await repository.findOne({
+    filterByTk
+  });
+  if (!job) {
+    return context.throw(404, "Job not found");
+  }
+  workflowPlugin.getLogger(job.workflowId).warn(`Resuming job #${job.id}...`);
+  await job.update(values);
+  context.body = job;
+  context.status = 202;
+  await next();
+  workflowPlugin.resume(job);
+}
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  resume
+});
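Note: the action replies 202 and only calls `workflowPlugin.resume(job)` after `next()`, so the HTTP response does not wait for the workflow to finish. A hypothetical client call, assuming NocoBase's usual "/api/<resource>:<action>" routing, a pending job with id 42, and `JOB_STATUS.RESOLVED === 1`:

// The request body becomes `values` and is merged into the job before resuming.
const response = await fetch("/api/jobs:resume?filterByTk=42", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ status: 1, result: { approved: true } }),
});
console.log(response.status); // 202: accepted; the resume runs after the response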
package/dist/server/actions/nodes.d.ts
CHANGED

@@ -9,5 +9,6 @@
 import { Context } from '@nocobase/actions';
 export declare function create(context: Context, next: any): Promise<void>;
 export declare function destroy(context: Context, next: any): Promise<void>;
+export declare function destroyBranch(context: Context, next: any): Promise<void>;
 export declare function update(context: Context, next: any): Promise<void>;
 export declare function test(context: Context, next: any): Promise<void>;
package/dist/server/actions/nodes.js
CHANGED

@@ -38,6 +38,7 @@ var nodes_exports = {};
 __export(nodes_exports, {
   create: () => create,
   destroy: () => destroy,
+  destroyBranch: () => destroyBranch,
   test: () => test,
   update: () => update
 });
@@ -158,7 +159,7 @@ async function destroy(context, next) {
   const repository = import_actions.utils.getRepositoryFromParams(context);
   const { filterByTk, keepBranch } = context.action.params;
   const keepBranchIndex = keepBranch == null || keepBranch === "" ? null : Number.parseInt(keepBranch, 10);
-  const fields = ["id", "upstreamId", "downstreamId", "branchIndex"];
+  const fields = ["id", "upstreamId", "downstreamId", "branchIndex", "key"];
   const instance = await repository.findOne({
     filterByTk,
     fields: [...fields, "workflowId"],
@@ -260,6 +261,81 @@ async function destroy(context, next) {
   context.body = instance;
   await next();
 }
+async function destroyBranch(context, next) {
+  const { db } = context;
+  const repository = import_actions.utils.getRepositoryFromParams(context);
+  const { filterByTk, branchIndex: branchIndexParam, shift: shiftParam } = context.action.params;
+  if (branchIndexParam == null || branchIndexParam === "") {
+    context.throw(400, "branchIndex is required");
+  }
+  const branchIndex = Number.parseInt(branchIndexParam, 10);
+  if (Number.isNaN(branchIndex)) {
+    context.throw(400, "branchIndex must be a number");
+  }
+  const shift = !(shiftParam == null || shiftParam === "") && Number.parseInt(String(shiftParam), 10) === 1;
+  const fields = ["id", "upstreamId", "downstreamId", "branchIndex", "key"];
+  const instance = await repository.findOne({
+    filterByTk,
+    fields: [...fields, "workflowId"],
+    appends: ["workflow.versionStats.executed"]
+  });
+  if (!instance) {
+    context.throw(404, "Node not found");
+  }
+  if (instance.workflow.versionStats.executed > 0) {
+    context.throw(400, "Branches in executed workflow could not be deleted");
+  }
+  let deletedBranchHead = null;
+  await db.sequelize.transaction(async (transaction) => {
+    const nodes = await repository.find({
+      filter: {
+        workflowId: instance.workflowId
+      },
+      fields,
+      transaction
+    });
+    const nodesMap = /* @__PURE__ */ new Map();
+    nodes.forEach((item) => {
+      nodesMap.set(item.id, item);
+    });
+    nodes.forEach((item) => {
+      if (item.upstreamId) {
+        item.upstream = nodesMap.get(item.upstreamId);
+      }
+      if (item.downstreamId) {
+        item.downstream = nodesMap.get(item.downstreamId);
+      }
+    });
+    const branchHeads = nodes.filter((item) => item.upstreamId === instance.id && item.branchIndex != null).sort((a, b) => a.branchIndex - b.branchIndex);
+    const branchHead = branchHeads.find((item) => item.branchIndex === branchIndex);
+    deletedBranchHead = branchHead || null;
+    if (branchHead) {
+      const nodesToDelete = searchBranchDownstreams(nodes, branchHead);
+      const idsToDelete = nodesToDelete.map((item) => item.id);
+      if (idsToDelete.length) {
+        await repository.destroy({
+          filterByTk: idsToDelete,
+          transaction
+        });
+      }
+    }
+    if (shift) {
+      const headsToShift = branchHeads.filter((item) => item.branchIndex > branchIndex);
+      await Promise.all(
+        headsToShift.map(
+          (item) => item.update(
+            {
+              branchIndex: item.branchIndex - 1
+            },
+            { transaction }
+          )
+        )
+      );
+    }
+  });
+  context.body = deletedBranchHead;
+  await next();
+}
 async function update(context, next) {
   const { db } = context;
   const repository = import_actions.utils.getRepositoryFromParams(context);
@@ -308,6 +384,7 @@ async function test(context, next) {
 0 && (module.exports = {
   create,
   destroy,
+  destroyBranch,
   test,
   update
 });
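Note: `destroyBranch` deletes the head node of the branch at `branchIndex` under the given node, plus everything downstream of it via `searchBranchDownstreams` (defined elsewhere in this file), and with `shift=1` renumbers higher-indexed sibling branches down by one so indexes stay contiguous. A hypothetical request, assuming the standard "/api/<resource>:<action>" routing and a node with id 7:

// Delete branch 2 under node 7 and shift branches 3, 4, ... down to 2, 3, ...
await fetch("/api/flow_nodes:destroyBranch?filterByTk=7&branchIndex=2&shift=1", {
  method: "POST",
});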
package/dist/server/instructions/MultiConditionsInstruction.d.ts
ADDED

@@ -0,0 +1,18 @@
+/**
+ * This file is part of the NocoBase (R) project.
+ * Copyright (c) 2020-2024 NocoBase Co., Ltd.
+ * Authors: NocoBase Team.
+ *
+ * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
+ * For more information, please refer to: https://www.nocobase.com/agreement.
+ */
+import { Instruction } from '.';
+import type Processor from '../Processor';
+import type { FlowNodeModel, JobModel } from '../types';
+export declare class MultiConditionsInstruction extends Instruction {
+  run(node: FlowNodeModel, prevJob: any, processor: Processor): Promise<JobModel>;
+  resume(node: FlowNodeModel, branchJob: JobModel, processor: Processor): Promise<any>;
+  private evaluateCondition;
+  private getBranchNode;
+}
+export default MultiConditionsInstruction;
package/dist/server/instructions/MultiConditionsInstruction.js
ADDED

@@ -0,0 +1,118 @@
+/**
+ * This file is part of the NocoBase (R) project.
+ * Copyright (c) 2020-2024 NocoBase Co., Ltd.
+ * Authors: NocoBase Team.
+ *
+ * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
+ * For more information, please refer to: https://www.nocobase.com/agreement.
+ */
+
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var MultiConditionsInstruction_exports = {};
+__export(MultiConditionsInstruction_exports, {
+  MultiConditionsInstruction: () => MultiConditionsInstruction,
+  default: () => MultiConditionsInstruction_default
+});
+module.exports = __toCommonJS(MultiConditionsInstruction_exports);
+var import_evaluators = require("@nocobase/evaluators");
+var import__ = require(".");
+var import_constants = require("../constants");
+var import_logicCalculate = require("../logicCalculate");
+class MultiConditionsInstruction extends import__.Instruction {
+  async run(node, prevJob, processor) {
+    const { conditions = [], continueOnNoMatch = false } = node.config || {};
+    const meta = { conditions: [] };
+    const job = processor.saveJob({
+      status: import_constants.JOB_STATUS.PENDING,
+      result: null,
+      meta,
+      nodeId: node.id,
+      nodeKey: node.key,
+      upstreamId: (prevJob == null ? void 0 : prevJob.id) ?? null
+    });
+    for (let cursor = 0; cursor < conditions.length; cursor++) {
+      const branchIndex = cursor + 1;
+      const condition = conditions[cursor];
+      let conditionResult;
+      try {
+        conditionResult = this.evaluateCondition(condition, node, processor);
+      } catch (error) {
+        conditionResult = error instanceof Error ? error.message : String(error);
+        processor.logger.error(`[multi-conditions] evaluate condition[${cursor}] error:`, { error });
+      } finally {
+        meta.conditions.push(conditionResult);
+        job.set("result", conditionResult);
+      }
+      if (typeof conditionResult === "string") {
+        job.set("status", import_constants.JOB_STATUS.ERROR);
+        return job;
+      }
+      if (conditionResult === true) {
+        const branchNode = this.getBranchNode(node, processor, branchIndex);
+        job.set("status", import_constants.JOB_STATUS.RESOLVED);
+        if (branchNode) {
+          await processor.run(branchNode, job);
+          return;
+        }
+        return job;
+      }
+    }
+    job.set("status", continueOnNoMatch ? import_constants.JOB_STATUS.RESOLVED : import_constants.JOB_STATUS.FAILED);
+    const defaultBranch = this.getBranchNode(node, processor, 0);
+    if (defaultBranch) {
+      await processor.run(defaultBranch, job);
+      return;
+    }
+    return job;
+  }
+  async resume(node, branchJob, processor) {
+    const job = processor.findBranchParentJob(branchJob, node);
+    if (!job) {
+      throw new Error("Parent job not found");
+    }
+    const { continueOnNoMatch = false } = node.config || {};
+    const jobNode = processor.nodesMap.get(branchJob.nodeId);
+    const branchStartNode = processor.findBranchStartNode(jobNode, node);
+    const branchIndex = branchStartNode.branchIndex;
+    if (branchJob.status === import_constants.JOB_STATUS.RESOLVED) {
+      if (branchIndex > 0) {
+        job.set({
+          status: import_constants.JOB_STATUS.RESOLVED
+        });
+        return job;
+      }
+      job.set({ status: continueOnNoMatch ? import_constants.JOB_STATUS.RESOLVED : import_constants.JOB_STATUS.FAILED });
+      return job;
+    }
+    return processor.exit(branchJob.status);
+  }
+  evaluateCondition(condition, node, processor) {
+    const { engine = "basic", calculation, expression } = condition ?? {};
+    const evaluator = import_evaluators.evaluators.get(engine);
+    return evaluator ? evaluator(expression, processor.getScope(node.id)) : (0, import_logicCalculate.logicCalculate)(processor.getParsedValue(calculation, node.id));
+  }
+  getBranchNode(node, processor, branchIndex) {
+    return processor.getBranches(node).find((item) => Number(item.branchIndex) === Number(branchIndex));
+  }
+}
+var MultiConditionsInstruction_default = MultiConditionsInstruction;
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  MultiConditionsInstruction
+});
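Note: conditions are evaluated in order; the first one yielding `true` routes into the branch at `branchIndex = cursor + 1`, branch 0 serves as the no-match default, and a string result (an evaluation error message) ends the job with `JOB_STATUS.ERROR`. A sketch of a plausible node config, inferred from how `run()` and `evaluateCondition()` read it — the `calculation` shape follows the existing condition instruction and is an assumption here:

// Hypothetical "multi-conditions" node configuration.
const nodeConfig = {
  type: "multi-conditions",
  config: {
    continueOnNoMatch: false, // no match and no default branch => JOB_STATUS.FAILED
    conditions: [
      // conditions[0] routes to branchIndex 1, conditions[1] to branchIndex 2, ...
      { engine: "math.js", expression: "{{$context.data.amount}} > 1000" },
      { engine: "basic", calculation: { group: { type: "and", calculations: [] } } },
    ],
    // a branch with branchIndex 0 under this node, if present, is the default
  },
};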
package/dist/server/instructions/index.d.ts
CHANGED

@@ -21,7 +21,9 @@ export type InstructionInterface = {
   run: Runner;
   resume?: Runner;
   getScope?: (node: FlowNodeModel, data: any, processor: Processor) => any;
-  duplicateConfig?: (node: FlowNodeModel, options: Transactionable) => object | Promise<object>;
+  duplicateConfig?: (node: FlowNodeModel, options: Transactionable & {
+    origin?: FlowNodeModel;
+  }) => object | Promise<object>;
   test?: (config: Record<string, any>) => IJob | Promise<IJob>;
 };
 export declare abstract class Instruction implements InstructionInterface {