@nocobase/plugin-workflow 2.0.0-alpha.7 → 2.0.0-alpha.70

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. package/dist/client/0b6813dae26ccf21.js +10 -0
  2. package/dist/client/{f68fbc145c3ddec3.js → 2076012783c998ad.js} +1 -1
  3. package/dist/client/59fcf22e646963ed.js +10 -0
  4. package/dist/client/AddNodeContext.d.ts +1 -0
  5. package/dist/client/Branch.d.ts +2 -0
  6. package/dist/client/NodeClipboardContext.d.ts +11 -0
  7. package/dist/client/NodeDragContext.d.ts +11 -0
  8. package/dist/client/RemoveNodeContext.d.ts +11 -0
  9. package/dist/client/WorkflowTasks.d.ts +3 -1
  10. package/dist/client/flows/triggerWorkflows.d.ts +15 -42
  11. package/dist/client/index.d.ts +3 -0
  12. package/dist/client/index.js +1 -1
  13. package/dist/client/models/NodeDetailsModel.d.ts +34 -0
  14. package/dist/client/models/NodeValueModel.d.ts +15 -0
  15. package/dist/client/models/TaskCardCommonItemModel.d.ts +14 -0
  16. package/dist/client/models/index.d.ts +11 -0
  17. package/dist/client/nodeVariableUtils.d.ts +14 -0
  18. package/dist/client/nodes/calculation.d.ts +7 -0
  19. package/dist/client/nodes/condition.d.ts +0 -3
  20. package/dist/client/nodes/create.d.ts +7 -0
  21. package/dist/client/nodes/index.d.ts +14 -0
  22. package/dist/client/nodes/multi-conditions.d.ts +57 -0
  23. package/dist/client/nodes/output.d.ts +53 -0
  24. package/dist/client/nodes/query.d.ts +7 -0
  25. package/dist/client/schemas/executions.d.ts +1 -1
  26. package/dist/client/style.d.ts +4 -0
  27. package/dist/client/variable.d.ts +1 -1
  28. package/dist/common/collections/executions.d.ts +1 -1
  29. package/dist/common/collections/executions.js +13 -1
  30. package/dist/common/collections/jobs.js +8 -0
  31. package/dist/externalVersion.js +12 -11
  32. package/dist/locale/de-DE.json +237 -188
  33. package/dist/locale/en-US.json +252 -188
  34. package/dist/locale/es-ES.json +245 -78
  35. package/dist/locale/fr-FR.json +245 -78
  36. package/dist/locale/hu-HU.json +266 -0
  37. package/dist/locale/id-ID.json +266 -0
  38. package/dist/locale/it-IT.json +236 -176
  39. package/dist/locale/ja-JP.json +241 -164
  40. package/dist/locale/ko-KR.json +267 -150
  41. package/dist/locale/nl-NL.json +264 -99
  42. package/dist/locale/pt-BR.json +245 -78
  43. package/dist/locale/ru-RU.json +250 -67
  44. package/dist/locale/tr-TR.json +246 -63
  45. package/dist/locale/uk-UA.json +265 -0
  46. package/dist/locale/vi-VN.json +266 -0
  47. package/dist/locale/zh-CN.json +257 -226
  48. package/dist/locale/zh-TW.json +265 -0
  49. package/dist/node_modules/cron-parser/package.json +1 -1
  50. package/dist/node_modules/lru-cache/package.json +1 -1
  51. package/dist/node_modules/nodejs-snowflake/package.json +1 -1
  52. package/dist/server/Dispatcher.d.ts +1 -3
  53. package/dist/server/Dispatcher.js +32 -31
  54. package/dist/server/Plugin.d.ts +3 -0
  55. package/dist/server/Plugin.js +61 -21
  56. package/dist/server/Processor.d.ts +9 -0
  57. package/dist/server/Processor.js +43 -13
  58. package/dist/server/actions/index.js +5 -0
  59. package/dist/server/actions/jobs.d.ts +9 -0
  60. package/dist/server/actions/jobs.js +64 -0
  61. package/dist/server/actions/nodes.d.ts +3 -0
  62. package/dist/server/actions/nodes.js +427 -17
  63. package/dist/server/instructions/ConditionInstruction.js +4 -1
  64. package/dist/server/instructions/MultiConditionsInstruction.d.ts +18 -0
  65. package/dist/server/instructions/MultiConditionsInstruction.js +118 -0
  66. package/dist/server/instructions/OutputInstruction.d.ts +21 -0
  67. package/dist/server/instructions/OutputInstruction.js +54 -0
  68. package/dist/server/instructions/index.d.ts +3 -1
  69. package/dist/server/repositories/WorkflowRepository.js +10 -2
  70. package/dist/server/triggers/CollectionTrigger.d.ts +3 -0
  71. package/dist/server/triggers/CollectionTrigger.js +31 -3
  72. package/dist/server/triggers/ScheduleTrigger/DateFieldScheduleTrigger.js +8 -0
  73. package/dist/server/triggers/index.d.ts +3 -1
  74. package/dist/server/types/Job.d.ts +1 -0
  75. package/package.json +5 -2
  76. package/dist/client/4985975bcaea35eb.js +0 -10
  77. package/dist/client/91bf4b18d5aad6a7.js +0 -10
@@ -38,6 +38,9 @@ var nodes_exports = {};
38
38
  __export(nodes_exports, {
39
39
  create: () => create,
40
40
  destroy: () => destroy,
41
+ destroyBranch: () => destroyBranch,
42
+ duplicate: () => duplicate,
43
+ move: () => move,
41
44
  test: () => test,
42
45
  update: () => update
43
46
  });
@@ -48,10 +51,10 @@ var import__ = __toESM(require(".."));
48
51
  async function create(context, next) {
49
52
  const { db } = context;
50
53
  const repository = import_actions.utils.getRepositoryFromParams(context);
51
- const { whitelist, blacklist, updateAssociationValues, values, associatedIndex: workflowId } = context.action.params;
54
+ const { whitelist, blacklist, updateAssociationValues, values } = context.action.params;
52
55
  const workflowPlugin = context.app.pm.get(import__.default);
53
56
  context.body = await db.sequelize.transaction(async (transaction) => {
54
- const workflow = workflowPlugin.enabledCache.get(Number.parseInt(workflowId, 10)) || await repository.getSourceModel(transaction);
57
+ const workflow = workflowPlugin.enabledCache.get(Number.parseInt(context.action.sourceId, 10)) || await repository.getSourceModel(transaction);
55
58
  if (!workflow.versionStats) {
56
59
  workflow.versionStats = await workflow.getVersionStats({ transaction });
57
60
  }
@@ -132,6 +135,117 @@ async function create(context, next) {
132
135
  });
133
136
  await next();
134
137
  }
138
+ async function duplicate(context, next) {
139
+ const { db } = context;
140
+ const repository = import_actions.utils.getRepositoryFromParams(context);
141
+ const { whitelist, blacklist, filterByTk, values = {} } = context.action.params;
142
+ const workflowPlugin = context.app.pm.get(import__.default);
143
+ context.body = await db.sequelize.transaction(async (transaction) => {
144
+ const origin = filterByTk ? await repository.findOne({ filterByTk, transaction }) : null;
145
+ if (!origin) {
146
+ return context.throw(404, "Node not found");
147
+ }
148
+ const workflow = workflowPlugin.enabledCache.get(origin.workflowId) || await db.getRepository("workflows").findOne({
149
+ filterByTk: origin.workflowId,
150
+ transaction
151
+ });
152
+ if (!workflow) {
153
+ return context.throw(400, "Workflow not found");
154
+ }
155
+ if (!workflow.versionStats) {
156
+ workflow.versionStats = await workflow.getVersionStats({ transaction });
157
+ }
158
+ if (workflow.versionStats.executed > 0) {
159
+ context.throw(400, "Node could not be created in executed workflow");
160
+ }
161
+ const NODES_LIMIT = process.env.WORKFLOW_NODES_LIMIT ? parseInt(process.env.WORKFLOW_NODES_LIMIT, 10) : null;
162
+ if (NODES_LIMIT) {
163
+ const nodesCount = await workflow.countNodes({ transaction });
164
+ if (nodesCount >= NODES_LIMIT) {
165
+ context.throw(400, `The number of nodes in a workflow cannot exceed ${NODES_LIMIT}`);
166
+ }
167
+ }
168
+ let nextConfig = values.config;
169
+ if (!nextConfig) {
170
+ const instruction = workflowPlugin.instructions.get(origin.type);
171
+ if (instruction && typeof instruction.duplicateConfig === "function") {
172
+ nextConfig = await instruction.duplicateConfig(origin, { origin: origin ?? void 0, transaction });
173
+ }
174
+ }
175
+ const instance = await repository.create({
176
+ values: {
177
+ config: nextConfig ?? origin.config,
178
+ upstreamId: values.upstreamId,
179
+ branchIndex: values.branchIndex,
180
+ type: origin.type,
181
+ title: origin.title,
182
+ workflowId: origin.workflowId
183
+ },
184
+ whitelist,
185
+ blacklist,
186
+ context,
187
+ transaction
188
+ });
189
+ if (!instance.upstreamId) {
190
+ const previousHead = await repository.findOne({
191
+ filter: {
192
+ id: {
193
+ $ne: instance.id
194
+ },
195
+ workflowId: origin.workflowId,
196
+ upstreamId: null
197
+ },
198
+ transaction
199
+ });
200
+ if (previousHead) {
201
+ await previousHead.setUpstream(instance, { transaction });
202
+ await instance.setDownstream(previousHead, { transaction });
203
+ instance.set("downstream", previousHead);
204
+ }
205
+ return instance;
206
+ }
207
+ const upstream = await instance.getUpstream({ transaction });
208
+ if (instance.branchIndex == null) {
209
+ const downstream = await upstream.getDownstream({ transaction });
210
+ if (downstream) {
211
+ await downstream.setUpstream(instance, { transaction });
212
+ await instance.setDownstream(downstream, { transaction });
213
+ instance.set("downstream", downstream);
214
+ }
215
+ await upstream.update(
216
+ {
217
+ downstreamId: instance.id
218
+ },
219
+ { transaction }
220
+ );
221
+ upstream.set("downstream", instance);
222
+ } else {
223
+ const [downstream] = await upstream.getBranches({
224
+ where: {
225
+ id: {
226
+ [import_database.Op.ne]: instance.id
227
+ },
228
+ branchIndex: instance.branchIndex
229
+ },
230
+ transaction
231
+ });
232
+ if (downstream) {
233
+ await downstream.update(
234
+ {
235
+ upstreamId: instance.id,
236
+ branchIndex: null
237
+ },
238
+ { transaction }
239
+ );
240
+ await instance.setDownstream(downstream, { transaction });
241
+ instance.set("downstream", downstream);
242
+ }
243
+ }
244
+ instance.set("upstream", upstream);
245
+ return instance;
246
+ });
247
+ await next();
248
+ }
135
249
  function searchBranchNodes(nodes, from) {
136
250
  const branchHeads = nodes.filter((item) => item.upstreamId === from.id && item.branchIndex != null);
137
251
  return branchHeads.reduce(
@@ -146,38 +260,146 @@ function searchBranchDownstreams(nodes, from) {
146
260
  }
147
261
  return result;
148
262
  }
263
+ function findBranchTail(branchHead) {
264
+ let tail = branchHead;
265
+ while (tail.downstream) {
266
+ tail = tail.downstream;
267
+ }
268
+ return tail;
269
+ }
149
270
  async function destroy(context, next) {
150
271
  const { db } = context;
151
272
  const repository = import_actions.utils.getRepositoryFromParams(context);
152
- const { filterByTk } = context.action.params;
153
- const fields = ["id", "upstreamId", "downstreamId", "branchIndex"];
273
+ const { filterByTk, keepBranch } = context.action.params;
274
+ const keepBranchIndex = keepBranch == null || keepBranch === "" ? null : Number.parseInt(keepBranch, 10);
275
+ const fields = ["id", "upstreamId", "downstreamId", "branchIndex", "key"];
154
276
  const instance = await repository.findOne({
155
277
  filterByTk,
156
278
  fields: [...fields, "workflowId"],
157
279
  appends: ["upstream", "downstream", "workflow.versionStats.executed"]
158
280
  });
281
+ if (!instance) {
282
+ context.throw(404, "Node not found");
283
+ }
159
284
  if (instance.workflow.versionStats.executed > 0) {
160
285
  context.throw(400, "Nodes in executed workflow could not be deleted");
161
286
  }
162
287
  await db.sequelize.transaction(async (transaction) => {
163
288
  const { upstream, downstream } = instance.get();
164
- if (upstream && upstream.downstreamId === instance.id) {
165
- await upstream.update(
166
- {
167
- downstreamId: instance.downstreamId
168
- },
169
- { transaction }
170
- );
289
+ const nodes = await repository.find({
290
+ filter: {
291
+ workflowId: instance.workflowId
292
+ },
293
+ fields,
294
+ transaction
295
+ });
296
+ const nodesMap = /* @__PURE__ */ new Map();
297
+ nodes.forEach((item) => {
298
+ nodesMap.set(item.id, item);
299
+ });
300
+ nodes.forEach((item) => {
301
+ if (item.upstreamId) {
302
+ item.upstream = nodesMap.get(item.upstreamId);
303
+ }
304
+ if (item.downstreamId) {
305
+ item.downstream = nodesMap.get(item.downstreamId);
306
+ }
307
+ });
308
+ const keepBranchHead = keepBranchIndex != null ? nodes.find((item) => item.upstreamId === instance.id && item.branchIndex == keepBranchIndex) : null;
309
+ if (keepBranchIndex != null && !keepBranchHead) {
310
+ context.throw(400, `Branch ${keepBranchIndex} not found`);
171
311
  }
172
- if (downstream) {
173
- await downstream.update(
312
+ const keepBranchNodes = keepBranchHead ? searchBranchDownstreams(nodes, keepBranchHead) : [];
313
+ const keepBranchNodeIds = new Set(keepBranchNodes.map((item) => item.id));
314
+ const branchNodes = instance ? searchBranchNodes(nodes, instance) : [];
315
+ const branchNodesToDelete = keepBranchHead ? branchNodes.filter((item) => !keepBranchNodeIds.has(item.id)) : branchNodes;
316
+ if (keepBranchHead) {
317
+ if (upstream && upstream.downstreamId === instance.id) {
318
+ await upstream.update(
319
+ {
320
+ downstreamId: keepBranchHead.id
321
+ },
322
+ { transaction }
323
+ );
324
+ }
325
+ await keepBranchHead.update(
174
326
  {
175
327
  upstreamId: instance.upstreamId,
176
328
  branchIndex: instance.branchIndex
177
329
  },
178
330
  { transaction }
179
331
  );
332
+ if (downstream) {
333
+ const branchTail = findBranchTail(keepBranchHead);
334
+ await branchTail.update(
335
+ {
336
+ downstreamId: instance.downstreamId
337
+ },
338
+ { transaction }
339
+ );
340
+ branchTail.downstreamId = instance.downstreamId;
341
+ branchTail.downstream = downstream;
342
+ await downstream.update(
343
+ {
344
+ upstreamId: branchTail.id,
345
+ branchIndex: null
346
+ },
347
+ { transaction }
348
+ );
349
+ }
350
+ } else {
351
+ if (upstream && upstream.downstreamId === instance.id) {
352
+ await upstream.update(
353
+ {
354
+ downstreamId: instance.downstreamId
355
+ },
356
+ { transaction }
357
+ );
358
+ }
359
+ if (downstream) {
360
+ await downstream.update(
361
+ {
362
+ upstreamId: instance.upstreamId,
363
+ branchIndex: instance.branchIndex
364
+ },
365
+ { transaction }
366
+ );
367
+ }
180
368
  }
369
+ await repository.destroy({
370
+ filterByTk: [instance.id, ...branchNodesToDelete.map((item) => item.id)],
371
+ transaction
372
+ });
373
+ });
374
+ context.body = instance;
375
+ await next();
376
+ }
377
+ async function destroyBranch(context, next) {
378
+ const { db } = context;
379
+ const repository = import_actions.utils.getRepositoryFromParams(context);
380
+ const { filterByTk, branchIndex: branchIndexParam, shift: shiftParam } = context.action.params;
381
+ if (branchIndexParam == null || branchIndexParam === "") {
382
+ context.throw(400, "branchIndex is required");
383
+ }
384
+ const branchIndex = Number.parseInt(branchIndexParam, 10);
385
+ if (Number.isNaN(branchIndex)) {
386
+ context.throw(400, "branchIndex must be a number");
387
+ }
388
+ const shift = !(shiftParam == null || shiftParam === "") && Number.parseInt(String(shiftParam), 10) === 1;
389
+ const fields = ["id", "upstreamId", "downstreamId", "branchIndex", "key"];
390
+ const instance = await repository.findOne({
391
+ filterByTk,
392
+ fields: [...fields, "workflowId"],
393
+ appends: ["workflow.versionStats.executed"]
394
+ });
395
+ if (!instance) {
396
+ context.throw(404, "Node not found");
397
+ }
398
+ if (instance.workflow.versionStats.executed > 0) {
399
+ context.throw(400, "Branches in executed workflow could not be deleted");
400
+ }
401
+ let deletedBranchHead = null;
402
+ await db.sequelize.transaction(async (transaction) => {
181
403
  const nodes = await repository.find({
182
404
  filter: {
183
405
  workflowId: instance.workflowId
@@ -197,13 +419,198 @@ async function destroy(context, next) {
197
419
  item.downstream = nodesMap.get(item.downstreamId);
198
420
  }
199
421
  });
200
- const branchNodes = searchBranchNodes(nodes, nodesMap.get(instance.id));
201
- await repository.destroy({
202
- filterByTk: [instance.id, ...branchNodes.map((item) => item.id)],
422
+ const branchHeads = nodes.filter((item) => item.upstreamId === instance.id && item.branchIndex != null).sort((a, b) => a.branchIndex - b.branchIndex);
423
+ const branchHead = branchHeads.find((item) => item.branchIndex === branchIndex);
424
+ deletedBranchHead = branchHead || null;
425
+ if (branchHead) {
426
+ const nodesToDelete = searchBranchDownstreams(nodes, branchHead);
427
+ const idsToDelete = nodesToDelete.map((item) => item.id);
428
+ if (idsToDelete.length) {
429
+ await repository.destroy({
430
+ filterByTk: idsToDelete,
431
+ transaction
432
+ });
433
+ }
434
+ }
435
+ if (shift) {
436
+ const headsToShift = branchHeads.filter((item) => item.branchIndex > branchIndex);
437
+ await Promise.all(
438
+ headsToShift.map(
439
+ (item) => item.update(
440
+ {
441
+ branchIndex: item.branchIndex - 1
442
+ },
443
+ { transaction }
444
+ )
445
+ )
446
+ );
447
+ }
448
+ });
449
+ context.body = deletedBranchHead;
450
+ await next();
451
+ }
452
+ async function move(context, next) {
453
+ const { db } = context;
454
+ const repository = import_actions.utils.getRepositoryFromParams(context);
455
+ const { filterByTk, values = {} } = context.action.params;
456
+ const rawUpstreamId = values.upstreamId;
457
+ const rawBranchIndex = values.branchIndex;
458
+ const upstreamId = rawUpstreamId == null || rawUpstreamId === "" ? null : rawUpstreamId;
459
+ let branchIndex = rawBranchIndex == null || rawBranchIndex === "" ? null : Number.parseInt(rawBranchIndex, 10);
460
+ if (rawBranchIndex != null && rawBranchIndex !== "" && Number.isNaN(branchIndex)) {
461
+ context.throw(400, "branchIndex must be a number");
462
+ }
463
+ if (upstreamId == null) {
464
+ branchIndex = null;
465
+ }
466
+ const fields = ["id", "key", "upstreamId", "downstreamId", "branchIndex", "workflowId"];
467
+ context.body = await db.sequelize.transaction(async (transaction) => {
468
+ const instance = await repository.findOne({
469
+ filterByTk,
470
+ fields,
471
+ appends: ["upstream", "downstream", "workflow.versionStats"],
472
+ transaction
473
+ });
474
+ if (!instance) {
475
+ context.throw(404, "Node not found");
476
+ }
477
+ if (instance.workflow.versionStats.executed > 0) {
478
+ context.throw(400, "Nodes in executed workflow could not be moved");
479
+ }
480
+ if (upstreamId != null && String(upstreamId) === String(instance.id)) {
481
+ context.throw(400, "Invalid upstream node");
482
+ }
483
+ const sameUpstream = (instance.upstreamId ?? null) == (upstreamId ?? null);
484
+ const sameBranchIndex = (instance.branchIndex ?? null) == (branchIndex ?? null);
485
+ if (sameUpstream && sameBranchIndex) {
486
+ context.throw(400, "Node does not need to be moved");
487
+ }
488
+ const { upstream: oldUpstream, downstream: oldDownstream } = instance.get();
489
+ if (oldUpstream && oldUpstream.downstreamId === instance.id) {
490
+ await oldUpstream.update(
491
+ {
492
+ downstreamId: oldDownstream ? oldDownstream.id : null
493
+ },
494
+ { transaction }
495
+ );
496
+ }
497
+ if (oldDownstream && oldDownstream.upstreamId === instance.id) {
498
+ await oldDownstream.update(
499
+ {
500
+ upstreamId: oldUpstream ? oldUpstream.id : null,
501
+ branchIndex: instance.branchIndex ?? null
502
+ },
503
+ { transaction }
504
+ );
505
+ }
506
+ let targetUpstream = null;
507
+ if (upstreamId != null) {
508
+ targetUpstream = await repository.findOne({
509
+ filterByTk: upstreamId,
510
+ fields,
511
+ transaction
512
+ });
513
+ if (!targetUpstream) {
514
+ context.throw(404, "Upstream node not found");
515
+ }
516
+ if (targetUpstream.workflowId !== instance.workflowId) {
517
+ context.throw(400, "Upstream node is not in the same workflow");
518
+ }
519
+ }
520
+ let newDownstream = null;
521
+ if (!targetUpstream) {
522
+ const previousHead = await repository.findOne({
523
+ filter: {
524
+ workflowId: instance.workflowId,
525
+ upstreamId: null,
526
+ id: {
527
+ [import_database.Op.ne]: instance.id
528
+ }
529
+ },
530
+ fields,
531
+ transaction
532
+ });
533
+ if (previousHead) {
534
+ await previousHead.update(
535
+ {
536
+ upstreamId: instance.id,
537
+ branchIndex: null
538
+ },
539
+ { transaction }
540
+ );
541
+ newDownstream = previousHead;
542
+ }
543
+ await instance.update(
544
+ {
545
+ upstreamId: null,
546
+ branchIndex: null,
547
+ downstreamId: newDownstream ? newDownstream.id : null
548
+ },
549
+ { transaction }
550
+ );
551
+ return instance;
552
+ }
553
+ if (branchIndex == null) {
554
+ if (targetUpstream.downstreamId) {
555
+ newDownstream = await repository.findOne({
556
+ filterByTk: targetUpstream.downstreamId,
557
+ fields,
558
+ transaction
559
+ });
560
+ }
561
+ if (newDownstream) {
562
+ await newDownstream.update(
563
+ {
564
+ upstreamId: instance.id,
565
+ branchIndex: null
566
+ },
567
+ { transaction }
568
+ );
569
+ }
570
+ await targetUpstream.update(
571
+ {
572
+ downstreamId: instance.id
573
+ },
574
+ { transaction }
575
+ );
576
+ await instance.update(
577
+ {
578
+ upstreamId: targetUpstream.id,
579
+ branchIndex: null,
580
+ downstreamId: newDownstream ? newDownstream.id : null
581
+ },
582
+ { transaction }
583
+ );
584
+ return instance;
585
+ }
586
+ const branchHead = await repository.findOne({
587
+ filter: {
588
+ upstreamId: targetUpstream.id,
589
+ branchIndex
590
+ },
591
+ fields,
203
592
  transaction
204
593
  });
594
+ if (branchHead) {
595
+ await branchHead.update(
596
+ {
597
+ upstreamId: instance.id,
598
+ branchIndex: null
599
+ },
600
+ { transaction }
601
+ );
602
+ newDownstream = branchHead;
603
+ }
604
+ await instance.update(
605
+ {
606
+ upstreamId: targetUpstream.id,
607
+ branchIndex,
608
+ downstreamId: newDownstream ? newDownstream.id : null
609
+ },
610
+ { transaction }
611
+ );
612
+ return instance;
205
613
  });
206
- context.body = instance;
207
614
  await next();
208
615
  }
209
616
  async function update(context, next) {
@@ -254,6 +661,9 @@ async function test(context, next) {
254
661
  0 && (module.exports = {
255
662
  create,
256
663
  destroy,
664
+ destroyBranch,
665
+ duplicate,
666
+ move,
257
667
  test,
258
668
  update
259
669
  });
@@ -81,7 +81,10 @@ class ConditionInstruction extends import__.Instruction {
81
81
  if (branchJob.status === import_constants.JOB_STATUS.RESOLVED) {
82
82
  return job;
83
83
  }
84
- return processor.exit(branchJob.status);
84
+ if (branchJob.status === import_constants.JOB_STATUS.PENDING) {
85
+ return processor.exit(branchJob.status);
86
+ }
87
+ return branchJob;
85
88
  }
86
89
  async test({ engine, calculation, expression = "" }) {
87
90
  const evaluator = import_evaluators.evaluators.get(engine);
@@ -0,0 +1,18 @@
1
+ /**
2
+ * This file is part of the NocoBase (R) project.
3
+ * Copyright (c) 2020-2024 NocoBase Co., Ltd.
4
+ * Authors: NocoBase Team.
5
+ *
6
+ * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
7
+ * For more information, please refer to: https://www.nocobase.com/agreement.
8
+ */
9
+ import { Instruction } from '.';
10
+ import type Processor from '../Processor';
11
+ import type { FlowNodeModel, JobModel } from '../types';
12
+ export declare class MultiConditionsInstruction extends Instruction {
13
+ run(node: FlowNodeModel, prevJob: any, processor: Processor): Promise<JobModel>;
14
+ resume(node: FlowNodeModel, branchJob: JobModel, processor: Processor): Promise<any>;
15
+ private evaluateCondition;
16
+ private getBranchNode;
17
+ }
18
+ export default MultiConditionsInstruction;
@@ -0,0 +1,118 @@
1
+ /**
2
+ * This file is part of the NocoBase (R) project.
3
+ * Copyright (c) 2020-2024 NocoBase Co., Ltd.
4
+ * Authors: NocoBase Team.
5
+ *
6
+ * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
7
+ * For more information, please refer to: https://www.nocobase.com/agreement.
8
+ */
9
+
10
+ var __defProp = Object.defineProperty;
11
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
12
+ var __getOwnPropNames = Object.getOwnPropertyNames;
13
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
14
+ var __export = (target, all) => {
15
+ for (var name in all)
16
+ __defProp(target, name, { get: all[name], enumerable: true });
17
+ };
18
+ var __copyProps = (to, from, except, desc) => {
19
+ if (from && typeof from === "object" || typeof from === "function") {
20
+ for (let key of __getOwnPropNames(from))
21
+ if (!__hasOwnProp.call(to, key) && key !== except)
22
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
23
+ }
24
+ return to;
25
+ };
26
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
27
+ var MultiConditionsInstruction_exports = {};
28
+ __export(MultiConditionsInstruction_exports, {
29
+ MultiConditionsInstruction: () => MultiConditionsInstruction,
30
+ default: () => MultiConditionsInstruction_default
31
+ });
32
+ module.exports = __toCommonJS(MultiConditionsInstruction_exports);
33
+ var import_evaluators = require("@nocobase/evaluators");
34
+ var import__ = require(".");
35
+ var import_constants = require("../constants");
36
+ var import_logicCalculate = require("../logicCalculate");
37
+ class MultiConditionsInstruction extends import__.Instruction {
38
+ async run(node, prevJob, processor) {
39
+ const { conditions = [], continueOnNoMatch = false } = node.config || {};
40
+ const meta = { conditions: [] };
41
+ const job = processor.saveJob({
42
+ status: import_constants.JOB_STATUS.PENDING,
43
+ result: null,
44
+ meta,
45
+ nodeId: node.id,
46
+ nodeKey: node.key,
47
+ upstreamId: (prevJob == null ? void 0 : prevJob.id) ?? null
48
+ });
49
+ for (let cursor = 0; cursor < conditions.length; cursor++) {
50
+ const branchIndex = cursor + 1;
51
+ const condition = conditions[cursor];
52
+ let conditionResult;
53
+ try {
54
+ conditionResult = this.evaluateCondition(condition, node, processor);
55
+ } catch (error) {
56
+ conditionResult = error instanceof Error ? error.message : String(error);
57
+ processor.logger.error(`[multi-conditions] evaluate condition[${cursor}] error:`, { error });
58
+ } finally {
59
+ meta.conditions.push(conditionResult);
60
+ job.set("result", conditionResult);
61
+ }
62
+ if (typeof conditionResult === "string") {
63
+ job.set("status", import_constants.JOB_STATUS.ERROR);
64
+ return job;
65
+ }
66
+ if (conditionResult === true) {
67
+ const branchNode = this.getBranchNode(node, processor, branchIndex);
68
+ job.set("status", import_constants.JOB_STATUS.RESOLVED);
69
+ if (branchNode) {
70
+ await processor.run(branchNode, job);
71
+ return;
72
+ }
73
+ return job;
74
+ }
75
+ }
76
+ job.set("status", continueOnNoMatch ? import_constants.JOB_STATUS.RESOLVED : import_constants.JOB_STATUS.FAILED);
77
+ const defaultBranch = this.getBranchNode(node, processor, 0);
78
+ if (defaultBranch) {
79
+ await processor.run(defaultBranch, job);
80
+ return;
81
+ }
82
+ return job;
83
+ }
84
+ async resume(node, branchJob, processor) {
85
+ const job = processor.findBranchParentJob(branchJob, node);
86
+ if (!job) {
87
+ throw new Error("Parent job not found");
88
+ }
89
+ const { continueOnNoMatch = false } = node.config || {};
90
+ const jobNode = processor.nodesMap.get(branchJob.nodeId);
91
+ const branchStartNode = processor.findBranchStartNode(jobNode, node);
92
+ const branchIndex = branchStartNode.branchIndex;
93
+ if (branchJob.status === import_constants.JOB_STATUS.RESOLVED) {
94
+ if (branchIndex > 0) {
95
+ job.set({
96
+ status: import_constants.JOB_STATUS.RESOLVED
97
+ });
98
+ return job;
99
+ }
100
+ job.set({ status: continueOnNoMatch ? import_constants.JOB_STATUS.RESOLVED : import_constants.JOB_STATUS.FAILED });
101
+ return job;
102
+ }
103
+ return processor.exit(branchJob.status);
104
+ }
105
+ evaluateCondition(condition, node, processor) {
106
+ const { engine = "basic", calculation, expression } = condition ?? {};
107
+ const evaluator = import_evaluators.evaluators.get(engine);
108
+ return evaluator ? evaluator(expression, processor.getScope(node.id)) : (0, import_logicCalculate.logicCalculate)(processor.getParsedValue(calculation, node.id));
109
+ }
110
+ getBranchNode(node, processor, branchIndex) {
111
+ return processor.getBranches(node).find((item) => Number(item.branchIndex) === Number(branchIndex));
112
+ }
113
+ }
114
+ var MultiConditionsInstruction_default = MultiConditionsInstruction;
115
+ // Annotate the CommonJS export names for ESM import in node:
116
+ 0 && (module.exports = {
117
+ MultiConditionsInstruction
118
+ });
@@ -0,0 +1,21 @@
1
+ /**
2
+ * This file is part of the NocoBase (R) project.
3
+ * Copyright (c) 2020-2024 NocoBase Co., Ltd.
4
+ * Authors: NocoBase Team.
5
+ *
6
+ * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
7
+ * For more information, please refer to: https://www.nocobase.com/agreement.
8
+ */
9
+ import { Instruction } from '.';
10
+ import Processor from '../Processor';
11
+ import { FlowNodeModel } from '../types';
12
+ export default class ExecutionResultInstruction extends Instruction {
13
+ run(node: FlowNodeModel, prevJob: any, processor: Processor): Promise<{
14
+ result: any;
15
+ status: -1;
16
+ } | {
17
+ result: any;
18
+ status: 1;
19
+ }>;
20
+ resume(node: FlowNodeModel, job: any, processor: Processor): Promise<any>;
21
+ }