@nocobase/plugin-workflow 2.0.0-alpha.5 → 2.0.0-alpha.50
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client/16ee3f25c89840fb.js +10 -0
- package/dist/client/7f42af110b1baadd.js +10 -0
- package/dist/client/{f68fbc145c3ddec3.js → 80d4cd8911e03c27.js} +1 -1
- package/dist/client/Branch.d.ts +2 -0
- package/dist/client/RemoveNodeContext.d.ts +11 -0
- package/dist/client/WorkflowTasks.d.ts +1 -0
- package/dist/client/flows/triggerWorkflows.d.ts +15 -42
- package/dist/client/index.js +1 -1
- package/dist/client/nodes/condition.d.ts +0 -3
- package/dist/client/nodes/multi-conditions.d.ts +57 -0
- package/dist/client/schemas/executions.d.ts +1 -1
- package/dist/common/collections/executions.d.ts +1 -1
- package/dist/common/collections/executions.js +13 -1
- package/dist/common/collections/jobs.js +4 -0
- package/dist/externalVersion.js +11 -11
- package/dist/locale/de-DE.json +231 -189
- package/dist/locale/en-US.json +231 -189
- package/dist/locale/es-ES.json +239 -79
- package/dist/locale/fr-FR.json +239 -79
- package/dist/locale/hu-HU.json +258 -0
- package/dist/locale/id-ID.json +258 -0
- package/dist/locale/it-IT.json +230 -177
- package/dist/locale/ja-JP.json +237 -165
- package/dist/locale/ko-KR.json +262 -151
- package/dist/locale/nl-NL.json +255 -98
- package/dist/locale/pt-BR.json +239 -79
- package/dist/locale/ru-RU.json +244 -68
- package/dist/locale/tr-TR.json +240 -64
- package/dist/locale/uk-UA.json +258 -0
- package/dist/locale/vi-VN.json +258 -0
- package/dist/locale/zh-CN.json +231 -227
- package/dist/locale/zh-TW.json +258 -0
- package/dist/node_modules/cron-parser/package.json +1 -1
- package/dist/node_modules/lru-cache/package.json +1 -1
- package/dist/node_modules/nodejs-snowflake/package.json +1 -1
- package/dist/server/Dispatcher.d.ts +1 -3
- package/dist/server/Dispatcher.js +30 -30
- package/dist/server/Plugin.d.ts +3 -0
- package/dist/server/Plugin.js +52 -20
- package/dist/server/Processor.js +31 -10
- package/dist/server/actions/index.js +1 -0
- package/dist/server/actions/nodes.d.ts +1 -0
- package/dist/server/actions/nodes.js +148 -17
- package/dist/server/instructions/MultiConditionsInstruction.d.ts +18 -0
- package/dist/server/instructions/MultiConditionsInstruction.js +118 -0
- package/dist/server/triggers/CollectionTrigger.js +15 -3
- package/dist/server/triggers/ScheduleTrigger/DateFieldScheduleTrigger.js +8 -0
- package/dist/server/types/Job.d.ts +1 -0
- package/package.json +5 -2
- package/dist/client/4985975bcaea35eb.js +0 -10
- package/dist/client/91bf4b18d5aad6a7.js +0 -10
package/dist/server/Plugin.js
CHANGED

@@ -36,6 +36,7 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var Plugin_exports = {};
 __export(Plugin_exports, {
+  WORKER_JOB_WORKFLOW_PROCESS: () => WORKER_JOB_WORKFLOW_PROCESS,
   default: () => PluginWorkflowServer
 });
 module.exports = __toCommonJS(Plugin_exports);
@@ -58,7 +59,9 @@ var import_CreateInstruction = __toESM(require("./instructions/CreateInstruction
 var import_DestroyInstruction = __toESM(require("./instructions/DestroyInstruction"));
 var import_QueryInstruction = __toESM(require("./instructions/QueryInstruction"));
 var import_UpdateInstruction = __toESM(require("./instructions/UpdateInstruction"));
+var import_MultiConditionsInstruction = __toESM(require("./instructions/MultiConditionsInstruction"));
 var import_WorkflowRepository = __toESM(require("./repositories/WorkflowRepository"));
+const WORKER_JOB_WORKFLOW_PROCESS = "workflow:process";
 class PluginWorkflowServer extends import_server.Plugin {
   instructions = new import_utils.Registry();
   triggers = new import_utils.Registry();
@@ -66,6 +69,9 @@ class PluginWorkflowServer extends import_server.Plugin {
   enabledCache = /* @__PURE__ */ new Map();
   snowflake;
   dispatcher = new import_Dispatcher.default(this);
+  get channelPendingExecution() {
+    return `${this.name}.pendingExecution`;
+  }
   loggerCache;
   meter = null;
   checker = null;
@@ -138,7 +144,6 @@ class PluginWorkflowServer extends import_server.Plugin {
   // * add all hooks for enabled workflows
   // * add hooks for create/update[enabled]/delete workflow to add/remove specific hooks
   onAfterStart = async () => {
-    this.dispatcher.setReady(true);
     const collection = this.db.getCollection("workflows");
     const workflows = await collection.repository.find({
       appends: ["versionStats"]
@@ -165,19 +170,20 @@ class PluginWorkflowServer extends import_server.Plugin {
       this.app.logger.info("workflow:dispatch");
       this.dispatcher.dispatch();
     });
+    this.dispatcher.setReady(true);
     this.getLogger("dispatcher").info("(starting) check for queueing executions");
     this.dispatcher.dispatch();
-    this.dispatcher.setReady(true);
   };
   onBeforeStop = async () => {
+    if (this.checker) {
+      clearInterval(this.checker);
+    }
+    await this.dispatcher.beforeStop();
     this.app.logger.info(`stopping workflow plugin before app (${this.app.name}) shutdown...`);
     for (const workflow of this.enabledCache.values()) {
       this.toggle(workflow, false, { silent: true });
     }
-
-    if (this.checker) {
-      clearInterval(this.checker);
-    }
+    this.app.eventQueue.unsubscribe(this.channelPendingExecution);
     this.loggerCache.clear();
   };
   async handleSyncMessage(message) {
@@ -202,13 +208,16 @@ class PluginWorkflowServer extends import_server.Plugin {
       }
     }
   }
+  serving() {
+    return this.app.serving(WORKER_JOB_WORKFLOW_PROCESS);
+  }
   /**
   * @experimental
   */
   getLogger(workflowId = "dispatcher") {
     const now = /* @__PURE__ */ new Date();
     const date = `${now.getFullYear()}-${`0${now.getMonth() + 1}`.slice(-2)}-${`0${now.getDate()}`.slice(-2)}`;
-    const key = `${date}-${workflowId}
+    const key = `${date}-${workflowId}`;
     if (this.loggerCache.has(key)) {
       return this.loggerCache.get(key);
     }
@@ -259,6 +268,7 @@ class PluginWorkflowServer extends import_server.Plugin {
   initInstructions(more = {}) {
     this.registerInstruction("calculation", import_CalculationInstruction.default);
     this.registerInstruction("condition", import_ConditionInstruction.default);
+    this.registerInstruction("multi-conditions", import_MultiConditionsInstruction.default);
     this.registerInstruction("end", import_EndInstruction.default);
     this.registerInstruction("create", import_CreateInstruction.default);
     this.registerInstruction("destroy", import_DestroyInstruction.default);
@@ -279,10 +289,6 @@ class PluginWorkflowServer extends import_server.Plugin {
     this.snowflake = new import_nodejs_snowflake.Snowflake({
       custom_epoch: pluginRecord == null ? void 0 : pluginRecord.createdAt.getTime()
     });
-    this.app.backgroundJobManager.subscribe(`${this.name}.pendingExecution`, {
-      idle: () => this.app.serving(import_Dispatcher.WORKER_JOB_WORKFLOW_PROCESS) && this.dispatcher.idle,
-      process: this.dispatcher.onQueueExecution
-    });
   }
   /**
   * @internal
@@ -297,14 +303,23 @@ class PluginWorkflowServer extends import_server.Plugin {
       max: 20,
       updateAgeOnGet: true,
       dispose(logger) {
-        logger
+        const cachedLogger = logger;
+        if (!cachedLogger) {
+          return;
+        }
+        cachedLogger.silent = true;
+        if (typeof cachedLogger.close === "function") {
+          cachedLogger.close();
+        }
       }
     });
     this.meter = this.app.telemetry.metric.getMeter();
-
-
-
-
+    if (this.meter) {
+      const counter = this.meter.createObservableGauge("workflow.events.counter");
+      counter.addCallback((result) => {
+        result.observe(this.dispatcher.getEventsCount());
+      });
+    }
     this.app.acl.registerSnippet({
       name: `pm.${this.name}.workflows`,
       actions: [
@@ -316,6 +331,7 @@ class PluginWorkflowServer extends import_server.Plugin {
         "executions:destroy",
         "flow_nodes:update",
         "flow_nodes:destroy",
+        "flow_nodes:destroyBranch",
         "flow_nodes:test",
         "jobs:get",
         "workflowCategories:*"
@@ -333,12 +349,18 @@ class PluginWorkflowServer extends import_server.Plugin {
     db.on("workflows.afterDestroy", this.onAfterDestroy);
     this.app.on("afterStart", this.onAfterStart);
     this.app.on("beforeStop", this.onBeforeStop);
+    this.app.eventQueue.subscribe(this.channelPendingExecution, {
+      idle: () => this.serving() && this.dispatcher.idle,
+      process: this.dispatcher.onQueueExecution
+    });
   }
   toggle(workflow, enable, { silent, transaction } = {}) {
     const type = workflow.get("type");
     const trigger = this.triggers.get(type);
     if (!trigger) {
-      this.getLogger(workflow.id).error(`trigger type ${workflow.type} of workflow ${workflow.id} is not implemented
+      this.getLogger(workflow.id).error(`trigger type ${workflow.type} of workflow ${workflow.id} is not implemented`, {
+        workflowId: workflow.id
+      });
       return;
     }
     const next = enable ?? workflow.get("enabled");
@@ -346,14 +368,20 @@ class PluginWorkflowServer extends import_server.Plugin {
       const prev = workflow.previous();
       if (prev.config) {
        trigger.off({ ...workflow.get(), ...prev });
-        this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id} based on configuration before updated
+        this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id} based on configuration before updated`, {
+          workflowId: workflow.id
+        });
      }
      trigger.on(workflow);
-      this.getLogger(workflow.id).info(`toggle ON workflow ${workflow.id}
+      this.getLogger(workflow.id).info(`toggle ON workflow ${workflow.id}`, {
+        workflowId: workflow.id
+      });
      this.enabledCache.set(workflow.id, workflow);
    } else {
      trigger.off(workflow);
-      this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id}
+      this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id}`, {
+        workflowId: workflow.id
+      });
      this.enabledCache.delete(workflow.id);
    }
    if (!silent) {
@@ -453,3 +481,7 @@ class PluginWorkflowServer extends import_server.Plugin {
     }
   }
 }
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  WORKER_JOB_WORKFLOW_PROCESS
+});
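Net effect of the Plugin.js changes: pending-execution handling moves from `backgroundJobManager` (subscribed during init) to `app.eventQueue` (subscribed alongside the other event handlers and explicitly unsubscribed in `onBeforeStop`), the worker job name becomes this module's exported `WORKER_JOB_WORKFLOW_PROCESS` constant, and the dispatcher is flagged ready before the startup dispatch rather than after. A minimal TypeScript sketch of the consumer contract implied by the `subscribe` call; the `EventQueue`/`QueueConsumer` names are assumptions for illustration, since only the `idle`/`process` members and the `subscribe`/`unsubscribe` calls appear in the compiled output:

// Sketch only: interface names are hypothetical; the diff shows just
// `app.eventQueue.subscribe(channel, { idle, process })` and
// `app.eventQueue.unsubscribe(channel)`.
interface QueueConsumer<T> {
  idle: () => boolean;                    // gate: consume only when true
  process: (message: T) => Promise<void>; // handle one dequeued message
}

interface EventQueue {
  subscribe<T>(channel: string, consumer: QueueConsumer<T>): void;
  unsubscribe(channel: string): void;
}

// Mirrors PluginWorkflowServer: one channel per plugin instance, consumed
// only while this node serves "workflow:process" and the dispatcher is idle.
function wirePendingExecutions(
  queue: EventQueue,
  channel: string,
  serving: () => boolean,
  dispatcher: { idle: boolean; onQueueExecution: (message: unknown) => Promise<void> },
): () => void {
  queue.subscribe(channel, {
    idle: () => serving() && dispatcher.idle,
    process: (message) => dispatcher.onQueueExecution(message),
  });
  // Returned disposer matches the onBeforeStop cleanup in the diff.
  return () => queue.unsubscribe(channel);
}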
package/dist/server/Processor.js
CHANGED

@@ -144,7 +144,9 @@ class Processor {
   async start() {
     const { execution } = this;
     if (execution.status) {
-      this.logger.warn(`execution was ended with status ${execution.status} before, could not be started again
+      this.logger.warn(`execution was ended with status ${execution.status} before, could not be started again`, {
+        workflowId: execution.workflowId
+      });
       return;
     }
     await this.prepare();
@@ -158,7 +160,9 @@ class Processor {
   async resume(job) {
     const { execution } = this;
     if (execution.status) {
-      this.logger.warn(`execution was ended with status ${execution.status} before, could not be resumed
+      this.logger.warn(`execution was ended with status ${execution.status} before, could not be resumed`, {
+        workflowId: execution.workflowId
+      });
       return;
     }
     await this.prepare();
@@ -168,7 +172,7 @@ class Processor {
   async exec(instruction, node, prevJob) {
     let job;
     try {
-      this.logger.debug(`config of node`, { data: node.config });
+      this.logger.debug(`config of node`, { data: node.config, workflowId: node.workflowId });
       job = await instruction(node, prevJob, this);
       if (job === null) {
         return this.exit();
@@ -179,7 +183,7 @@ class Processor {
     } catch (err) {
       this.logger.error(
         `execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id}) failed: `,
-        err
+        { error: err, workflowId: node.workflowId }
       );
       job = {
         result: err instanceof Error ? {
@@ -199,7 +203,10 @@ class Processor {
     }
     const savedJob = this.saveJob(job);
     this.logger.info(
-      `execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id}) finished as status: ${savedJob.status}
+      `execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id}) finished as status: ${savedJob.status}`,
+      {
+        workflowId: node.workflowId
+      }
     );
     this.logger.debug(`result of node`, { data: savedJob.result });
     if (savedJob.status === import_constants.JOB_STATUS.RESOLVED && node.downstream) {
@@ -217,7 +224,9 @@ class Processor {
     if (typeof instruction.run !== "function") {
       return Promise.reject(new Error("`run` should be implemented for customized execution of the node"));
     }
-    this.logger.info(`execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id})
+    this.logger.info(`execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id})`, {
+      workflowId: node.workflowId
+    });
     return this.exec(instruction.run.bind(instruction), node, input);
   }
   // parent node should take over the control
@@ -225,7 +234,9 @@ class Processor {
     this.logger.debug(`branch ended at node (${node.id})`);
     const parentNode = this.findBranchParentNode(node);
     if (parentNode) {
-      this.logger.debug(`not on main, recall to parent entry node (${node.id})})
+      this.logger.debug(`not on main, recall to parent entry node (${node.id})})`, {
+        workflowId: node.workflowId
+      });
       await this.recall(parentNode, job);
       return null;
     }
@@ -242,7 +253,9 @@ class Processor {
        new Error(`"resume" method should be implemented for [${node.type}] instruction of node (#${node.id})`)
      );
    }
-    this.logger.info(`execution (${this.execution.id}) resume instruction [${node.type}] for node (${node.id})
+    this.logger.info(`execution (${this.execution.id}) resume instruction [${node.type}] for node (${node.id})`, {
+      workflowId: node.workflowId
+    });
     return this.exec(instruction.resume.bind(instruction), node, job);
   }
   async exit(s) {
@@ -261,6 +274,10 @@ class Processor {
       changes.push([`status`, job.status]);
       job.changed("status", false);
     }
+    if (job.changed("meta")) {
+      changes.push([`meta`, JSON.stringify(job.meta ?? null)]);
+      job.changed("meta", false);
+    }
     if (job.changed("result")) {
       changes.push([`result`, JSON.stringify(job.result ?? null)]);
       job.changed("result", false);
@@ -295,7 +312,9 @@ class Processor {
     if (this.mainTransaction && this.mainTransaction !== this.transaction) {
       await this.mainTransaction.commit();
     }
-    this.logger.info(`execution (${this.execution.id}) exiting with status ${this.execution.status}
+    this.logger.info(`execution (${this.execution.id}) exiting with status ${this.execution.status}`, {
+      workflowId: this.execution.workflowId
+    });
     return null;
   }
   /**
@@ -326,7 +345,9 @@ class Processor {
     this.lastSavedJob = job;
     this.jobsMapByNodeKey[job.nodeKey] = job;
     this.jobResultsMapByNodeKey[job.nodeKey] = job.result;
-    this.logger.debug(`job added to save list: ${JSON.stringify(job)}
+    this.logger.debug(`job added to save list: ${JSON.stringify(job)}`, {
+      workflowId: this.execution.workflowId
+    });
     return job;
   }
   /**
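Every logging call in Processor.js gains a `{ workflowId }` metadata object as the second argument (and the previously truncated template strings get their closing backticks back). This is the winston-style `logger.level(message, meta)` convention, which turns the workflow id into a structured field that log transports can filter on instead of burying it in the message text. A sketch of the pattern, with the `Logger` shape assumed for illustration:

// Assumed winston-like logger shape; only the (message, meta) call style
// is taken from the diff above.
interface Logger {
  info(message: string, meta?: Record<string, unknown>): void;
}

function logNodeFinished(
  logger: Logger,
  execution: { id: number; workflowId: number },
  node: { id: number; type: string },
  status: number,
) {
  // Ids stay in the message for humans and become structured fields too,
  // so a transport can route or filter log records per workflow.
  logger.info(
    `execution (${execution.id}) run instruction [${node.type}] for node (${node.id}) finished as status: ${status}`,
    { workflowId: execution.workflowId },
  );
}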
package/dist/server/actions/nodes.d.ts
CHANGED

@@ -9,5 +9,6 @@
 import { Context } from '@nocobase/actions';
 export declare function create(context: Context, next: any): Promise<void>;
 export declare function destroy(context: Context, next: any): Promise<void>;
+export declare function destroyBranch(context: Context, next: any): Promise<void>;
 export declare function update(context: Context, next: any): Promise<void>;
 export declare function test(context: Context, next: any): Promise<void>;
package/dist/server/actions/nodes.js
CHANGED

@@ -38,6 +38,7 @@ var nodes_exports = {};
 __export(nodes_exports, {
   create: () => create,
   destroy: () => destroy,
+  destroyBranch: () => destroyBranch,
   test: () => test,
   update: () => update
 });
@@ -146,38 +147,146 @@ function searchBranchDownstreams(nodes, from) {
   }
   return result;
 }
+function findBranchTail(branchHead) {
+  let tail = branchHead;
+  while (tail.downstream) {
+    tail = tail.downstream;
+  }
+  return tail;
+}
 async function destroy(context, next) {
   const { db } = context;
   const repository = import_actions.utils.getRepositoryFromParams(context);
-  const { filterByTk } = context.action.params;
-  const
+  const { filterByTk, keepBranch } = context.action.params;
+  const keepBranchIndex = keepBranch == null || keepBranch === "" ? null : Number.parseInt(keepBranch, 10);
+  const fields = ["id", "upstreamId", "downstreamId", "branchIndex", "key"];
   const instance = await repository.findOne({
     filterByTk,
     fields: [...fields, "workflowId"],
     appends: ["upstream", "downstream", "workflow.versionStats.executed"]
   });
+  if (!instance) {
+    context.throw(404, "Node not found");
+  }
   if (instance.workflow.versionStats.executed > 0) {
     context.throw(400, "Nodes in executed workflow could not be deleted");
   }
   await db.sequelize.transaction(async (transaction) => {
     const { upstream, downstream } = instance.get();
-
-
-
-
-
-
-
+    const nodes = await repository.find({
+      filter: {
+        workflowId: instance.workflowId
+      },
+      fields,
+      transaction
+    });
+    const nodesMap = /* @__PURE__ */ new Map();
+    nodes.forEach((item) => {
+      nodesMap.set(item.id, item);
+    });
+    nodes.forEach((item) => {
+      if (item.upstreamId) {
+        item.upstream = nodesMap.get(item.upstreamId);
+      }
+      if (item.downstreamId) {
+        item.downstream = nodesMap.get(item.downstreamId);
+      }
+    });
+    const keepBranchHead = keepBranchIndex != null ? nodes.find((item) => item.upstreamId === instance.id && item.branchIndex == keepBranchIndex) : null;
+    if (keepBranchIndex != null && !keepBranchHead) {
+      context.throw(400, `Branch ${keepBranchIndex} not found`);
     }
-
-
+    const keepBranchNodes = keepBranchHead ? searchBranchDownstreams(nodes, keepBranchHead) : [];
+    const keepBranchNodeIds = new Set(keepBranchNodes.map((item) => item.id));
+    const branchNodes = instance ? searchBranchNodes(nodes, instance) : [];
+    const branchNodesToDelete = keepBranchHead ? branchNodes.filter((item) => !keepBranchNodeIds.has(item.id)) : branchNodes;
+    if (keepBranchHead) {
+      if (upstream && upstream.downstreamId === instance.id) {
+        await upstream.update(
+          {
+            downstreamId: keepBranchHead.id
+          },
+          { transaction }
+        );
+      }
+      await keepBranchHead.update(
        {
          upstreamId: instance.upstreamId,
          branchIndex: instance.branchIndex
        },
        { transaction }
      );
+      if (downstream) {
+        const branchTail = findBranchTail(keepBranchHead);
+        await branchTail.update(
+          {
+            downstreamId: instance.downstreamId
+          },
+          { transaction }
+        );
+        branchTail.downstreamId = instance.downstreamId;
+        branchTail.downstream = downstream;
+        await downstream.update(
+          {
+            upstreamId: branchTail.id,
+            branchIndex: null
+          },
+          { transaction }
+        );
+      }
+    } else {
+      if (upstream && upstream.downstreamId === instance.id) {
+        await upstream.update(
+          {
+            downstreamId: instance.downstreamId
+          },
+          { transaction }
+        );
+      }
+      if (downstream) {
+        await downstream.update(
+          {
+            upstreamId: instance.upstreamId,
+            branchIndex: instance.branchIndex
+          },
+          { transaction }
+        );
+      }
     }
+    await repository.destroy({
+      filterByTk: [instance.id, ...branchNodesToDelete.map((item) => item.id)],
+      transaction
+    });
+  });
+  context.body = instance;
+  await next();
+}
+async function destroyBranch(context, next) {
+  const { db } = context;
+  const repository = import_actions.utils.getRepositoryFromParams(context);
+  const { filterByTk, branchIndex: branchIndexParam, shift: shiftParam } = context.action.params;
+  if (branchIndexParam == null || branchIndexParam === "") {
+    context.throw(400, "branchIndex is required");
+  }
+  const branchIndex = Number.parseInt(branchIndexParam, 10);
+  if (Number.isNaN(branchIndex)) {
+    context.throw(400, "branchIndex must be a number");
+  }
+  const shift = !(shiftParam == null || shiftParam === "") && Number.parseInt(String(shiftParam), 10) === 1;
+  const fields = ["id", "upstreamId", "downstreamId", "branchIndex", "key"];
+  const instance = await repository.findOne({
+    filterByTk,
+    fields: [...fields, "workflowId"],
+    appends: ["workflow.versionStats.executed"]
+  });
+  if (!instance) {
+    context.throw(404, "Node not found");
+  }
+  if (instance.workflow.versionStats.executed > 0) {
+    context.throw(400, "Branches in executed workflow could not be deleted");
+  }
+  let deletedBranchHead = null;
+  await db.sequelize.transaction(async (transaction) => {
   const nodes = await repository.find({
     filter: {
       workflowId: instance.workflowId
@@ -197,13 +306,34 @@ async function destroy(context, next) {
       item.downstream = nodesMap.get(item.downstreamId);
     }
   });
-  const
-
-
-
-
+  const branchHeads = nodes.filter((item) => item.upstreamId === instance.id && item.branchIndex != null).sort((a, b) => a.branchIndex - b.branchIndex);
+  const branchHead = branchHeads.find((item) => item.branchIndex === branchIndex);
+  deletedBranchHead = branchHead || null;
+  if (branchHead) {
+    const nodesToDelete = searchBranchDownstreams(nodes, branchHead);
+    const idsToDelete = nodesToDelete.map((item) => item.id);
+    if (idsToDelete.length) {
+      await repository.destroy({
+        filterByTk: idsToDelete,
+        transaction
+      });
+    }
+  }
+  if (shift) {
+    const headsToShift = branchHeads.filter((item) => item.branchIndex > branchIndex);
+    await Promise.all(
+      headsToShift.map(
        (item) => item.update(
          {
            branchIndex: item.branchIndex - 1
          },
          { transaction }
        )
      )
    );
+  }
  });
-  context.body =
+  context.body = deletedBranchHead;
  await next();
}
async function update(context, next) {
@@ -254,6 +384,7 @@ async function test(context, next) {
 0 && (module.exports = {
   create,
   destroy,
+  destroyBranch,
   test,
   update
 });
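In sum, `destroy` now loads the workflow's whole node graph inside the transaction and accepts an optional `keepBranch` index: the kept branch's head is spliced into the deleted node's position (and its tail reconnected to the old downstream), while every other branch node is destroyed with it. The new `destroyBranch` action removes one branch of a node by `branchIndex`, optionally renumbering later branches down by one when `shift` is `1`. A hedged sketch of calling both actions through the NocoBase SDK's resource proxy; the client shape follows @nocobase/sdk's `resource(...).action(params)` convention, and the node ids are invented for illustration:

import { APIClient } from '@nocobase/sdk';

const api = new APIClient({ baseURL: 'http://localhost:13000/api' });

async function pruneNode() {
  // Remove branch #2 of node 42 and shift branches 3, 4, ... down by one.
  await api.resource('flow_nodes').destroyBranch({
    filterByTk: 42,
    branchIndex: 2,
    shift: 1,
  });

  // Delete node 42 itself, promoting branch #1 into the main flow in its place.
  await api.resource('flow_nodes').destroy({
    filterByTk: 42,
    keepBranch: 1,
  });
}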
package/dist/server/instructions/MultiConditionsInstruction.d.ts
ADDED

@@ -0,0 +1,18 @@
+/**
+ * This file is part of the NocoBase (R) project.
+ * Copyright (c) 2020-2024 NocoBase Co., Ltd.
+ * Authors: NocoBase Team.
+ *
+ * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
+ * For more information, please refer to: https://www.nocobase.com/agreement.
+ */
+import { Instruction } from '.';
+import type Processor from '../Processor';
+import type { FlowNodeModel, JobModel } from '../types';
+export declare class MultiConditionsInstruction extends Instruction {
+    run(node: FlowNodeModel, prevJob: any, processor: Processor): Promise<JobModel>;
+    resume(node: FlowNodeModel, branchJob: JobModel, processor: Processor): Promise<any>;
+    private evaluateCondition;
+    private getBranchNode;
+}
+export default MultiConditionsInstruction;
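The declaration follows the branch-capable `Instruction` contract: `run` fires when the node is first reached, and `resume` fires when a job inside one of its branches settles. Third-party instructions extend the same surface; a minimal sketch based on the documented `registerInstruction` API (the instruction class, its registration key, and the plugin class below are invented for illustration):

import { Plugin } from '@nocobase/server';
import PluginWorkflowServer, { Instruction, JOB_STATUS } from '@nocobase/plugin-workflow';

class RandomPickInstruction extends Instruction {
  run(node: any, prevJob: any, processor: any) {
    // Resolve immediately with a random result; a real instruction could
    // return JOB_STATUS.PENDING and finish the job later via resume().
    return {
      status: JOB_STATUS.RESOLVED,
      result: Math.random(),
    };
  }
}

export class MyPlugin extends Plugin {
  async load() {
    // Same registration path the diff uses for "multi-conditions".
    const workflow = this.app.pm.get(PluginWorkflowServer) as PluginWorkflowServer;
    workflow.registerInstruction('random-pick', RandomPickInstruction);
  }
}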
package/dist/server/instructions/MultiConditionsInstruction.js
ADDED

@@ -0,0 +1,118 @@
+/**
+ * This file is part of the NocoBase (R) project.
+ * Copyright (c) 2020-2024 NocoBase Co., Ltd.
+ * Authors: NocoBase Team.
+ *
+ * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
+ * For more information, please refer to: https://www.nocobase.com/agreement.
+ */
+
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var MultiConditionsInstruction_exports = {};
+__export(MultiConditionsInstruction_exports, {
+  MultiConditionsInstruction: () => MultiConditionsInstruction,
+  default: () => MultiConditionsInstruction_default
+});
+module.exports = __toCommonJS(MultiConditionsInstruction_exports);
+var import_evaluators = require("@nocobase/evaluators");
+var import__ = require(".");
+var import_constants = require("../constants");
+var import_logicCalculate = require("../logicCalculate");
+class MultiConditionsInstruction extends import__.Instruction {
+  async run(node, prevJob, processor) {
+    const { conditions = [], continueOnNoMatch = false } = node.config || {};
+    const meta = { conditions: [] };
+    const job = processor.saveJob({
+      status: import_constants.JOB_STATUS.PENDING,
+      result: null,
+      meta,
+      nodeId: node.id,
+      nodeKey: node.key,
+      upstreamId: (prevJob == null ? void 0 : prevJob.id) ?? null
+    });
+    for (let cursor = 0; cursor < conditions.length; cursor++) {
+      const branchIndex = cursor + 1;
+      const condition = conditions[cursor];
+      let conditionResult;
+      try {
+        conditionResult = this.evaluateCondition(condition, node, processor);
+      } catch (error) {
+        conditionResult = error instanceof Error ? error.message : String(error);
+        processor.logger.error(`[multi-conditions] evaluate condition[${cursor}] error:`, { error });
+      } finally {
+        meta.conditions.push(conditionResult);
+        job.set("result", conditionResult);
+      }
+      if (typeof conditionResult === "string") {
+        job.set("status", import_constants.JOB_STATUS.ERROR);
+        return job;
+      }
+      if (conditionResult === true) {
+        const branchNode = this.getBranchNode(node, processor, branchIndex);
+        job.set("status", import_constants.JOB_STATUS.RESOLVED);
+        if (branchNode) {
+          await processor.run(branchNode, job);
+          return;
+        }
+        return job;
+      }
+    }
+    job.set("status", continueOnNoMatch ? import_constants.JOB_STATUS.RESOLVED : import_constants.JOB_STATUS.FAILED);
+    const defaultBranch = this.getBranchNode(node, processor, 0);
+    if (defaultBranch) {
+      await processor.run(defaultBranch, job);
+      return;
+    }
+    return job;
+  }
+  async resume(node, branchJob, processor) {
+    const job = processor.findBranchParentJob(branchJob, node);
+    if (!job) {
+      throw new Error("Parent job not found");
+    }
+    const { continueOnNoMatch = false } = node.config || {};
+    const jobNode = processor.nodesMap.get(branchJob.nodeId);
+    const branchStartNode = processor.findBranchStartNode(jobNode, node);
+    const branchIndex = branchStartNode.branchIndex;
+    if (branchJob.status === import_constants.JOB_STATUS.RESOLVED) {
+      if (branchIndex > 0) {
+        job.set({
+          status: import_constants.JOB_STATUS.RESOLVED
+        });
+        return job;
+      }
+      job.set({ status: continueOnNoMatch ? import_constants.JOB_STATUS.RESOLVED : import_constants.JOB_STATUS.FAILED });
+      return job;
+    }
+    return processor.exit(branchJob.status);
+  }
+  evaluateCondition(condition, node, processor) {
+    const { engine = "basic", calculation, expression } = condition ?? {};
+    const evaluator = import_evaluators.evaluators.get(engine);
+    return evaluator ? evaluator(expression, processor.getScope(node.id)) : (0, import_logicCalculate.logicCalculate)(processor.getParsedValue(calculation, node.id));
+  }
+  getBranchNode(node, processor, branchIndex) {
+    return processor.getBranches(node).find((item) => Number(item.branchIndex) === Number(branchIndex));
+  }
+}
+var MultiConditionsInstruction_default = MultiConditionsInstruction;
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  MultiConditionsInstruction
+});
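Reading the implementation: conditions are evaluated in order, condition `i` guards branch `i + 1`, and branch 0 is the default ("else") branch. The first condition evaluating to `true` routes execution into its branch; a string result marks an evaluation error (job status ERROR); each per-condition outcome is appended to the job's new `meta.conditions` array, persisted via the `meta` handling added in Processor.js. When nothing matches, the job resolves or fails according to `continueOnNoMatch` and falls through to the default branch if one exists. The node config shape this implies, reconstructed as a hedged TypeScript interface (field types are inferred from the compiled code, not from published typings):

interface MultiConditionsNodeConfig {
  // Evaluated in order; the condition at index i guards branch i + 1.
  conditions?: Array<{
    // Key into the @nocobase/evaluators registry; the default "basic" has
    // no registered evaluator and falls back to the built-in
    // logicCalculate() path. Other keys are an assumption based on the
    // evaluators package, not on this diff.
    engine?: string;
    calculation?: unknown; // used by the "basic" logicCalculate path
    expression?: string;   // used when an evaluator engine matches
  }>;
  // On no match: true resolves the job (entering branch 0, the default
  // branch, when present); false fails it.
  continueOnNoMatch?: boolean;
}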