@nocobase/plugin-workflow 0.7.0-alpha.4 → 0.7.0-alpha.58
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/actions/index.d.ts +3 -1
- package/lib/actions/index.js +33 -26
- package/{esm/actions/flow_nodes.d.ts → lib/actions/nodes.d.ts} +1 -0
- package/lib/actions/nodes.js +321 -0
- package/lib/actions/workflows.d.ts +3 -0
- package/lib/actions/workflows.js +271 -0
- package/lib/calculators/index.js +143 -91
- package/lib/collections/executions.js +40 -38
- package/lib/collections/flow_nodes.js +60 -72
- package/lib/collections/jobs.js +45 -47
- package/lib/collections/workflows.js +78 -63
- package/lib/constants.js +22 -17
- package/lib/index.js +71 -22
- package/lib/instructions/calculation.js +34 -29
- package/lib/instructions/condition.js +94 -87
- package/lib/instructions/create.d.ts +1 -1
- package/lib/instructions/create.js +43 -25
- package/lib/instructions/destroy.js +42 -25
- package/lib/instructions/index.js +46 -25
- package/lib/instructions/parallel.js +99 -84
- package/lib/instructions/prompt.js +21 -13
- package/lib/instructions/query.js +46 -25
- package/lib/instructions/update.d.ts +2 -1
- package/lib/instructions/update.js +44 -25
- package/lib/models/Execution.d.ts +3 -2
- package/lib/models/Execution.js +400 -241
- package/lib/models/FlowNode.js +18 -5
- package/lib/models/Job.js +18 -5
- package/lib/models/Workflow.d.ts +4 -5
- package/lib/models/Workflow.js +95 -72
- package/lib/server.d.ts +7 -2
- package/lib/server.js +114 -65
- package/lib/triggers/collection.d.ts +16 -0
- package/lib/triggers/collection.js +162 -0
- package/lib/triggers/index.d.ts +3 -6
- package/lib/triggers/index.js +14 -11
- package/package.json +7 -12
- package/esm/actions/flow_nodes.js +0 -139
- package/esm/actions/flow_nodes.js.map +0 -1
- package/esm/actions/index.d.ts +0 -1
- package/esm/actions/index.js +0 -8
- package/esm/actions/index.js.map +0 -1
- package/esm/calculators/index.d.ts +0 -38
- package/esm/calculators/index.js +0 -128
- package/esm/calculators/index.js.map +0 -1
- package/esm/collections/executions.d.ts +0 -3
- package/esm/collections/executions.js +0 -38
- package/esm/collections/executions.js.map +0 -1
- package/esm/collections/flow_nodes.d.ts +0 -3
- package/esm/collections/flow_nodes.js +0 -72
- package/esm/collections/flow_nodes.js.map +0 -1
- package/esm/collections/jobs.d.ts +0 -3
- package/esm/collections/jobs.js +0 -47
- package/esm/collections/jobs.js.map +0 -1
- package/esm/collections/workflows.d.ts +0 -3
- package/esm/collections/workflows.js +0 -63
- package/esm/collections/workflows.js.map +0 -1
- package/esm/constants.d.ts +0 -17
- package/esm/constants.js +0 -18
- package/esm/constants.js.map +0 -1
- package/esm/index.d.ts +0 -5
- package/esm/index.js +0 -6
- package/esm/index.js.map +0 -1
- package/esm/instructions/calculation.d.ts +0 -8
- package/esm/instructions/calculation.js +0 -55
- package/esm/instructions/calculation.js.map +0 -1
- package/esm/instructions/condition.d.ts +0 -5
- package/esm/instructions/condition.js +0 -99
- package/esm/instructions/condition.js.map +0 -1
- package/esm/instructions/create.d.ts +0 -8
- package/esm/instructions/create.js +0 -25
- package/esm/instructions/create.js.map +0 -1
- package/esm/instructions/destroy.d.ts +0 -8
- package/esm/instructions/destroy.js +0 -25
- package/esm/instructions/destroy.js.map +0 -1
- package/esm/instructions/index.d.ts +0 -15
- package/esm/instructions/index.js +0 -20
- package/esm/instructions/index.js.map +0 -1
- package/esm/instructions/parallel.d.ts +0 -13
- package/esm/instructions/parallel.js +0 -88
- package/esm/instructions/parallel.js.map +0 -1
- package/esm/instructions/prompt.d.ts +0 -7
- package/esm/instructions/prompt.js +0 -13
- package/esm/instructions/prompt.js.map +0 -1
- package/esm/instructions/query.d.ts +0 -8
- package/esm/instructions/query.js +0 -25
- package/esm/instructions/query.js.map +0 -1
- package/esm/instructions/update.d.ts +0 -8
- package/esm/instructions/update.js +0 -25
- package/esm/instructions/update.js.map +0 -1
- package/esm/models/Execution.d.ts +0 -50
- package/esm/models/Execution.js +0 -250
- package/esm/models/Execution.js.map +0 -1
- package/esm/models/FlowNode.d.ts +0 -17
- package/esm/models/FlowNode.js +0 -4
- package/esm/models/FlowNode.js.map +0 -1
- package/esm/models/Job.d.ts +0 -15
- package/esm/models/Job.js +0 -4
- package/esm/models/Job.js.map +0 -1
- package/esm/models/Workflow.d.ts +0 -27
- package/esm/models/Workflow.js +0 -72
- package/esm/models/Workflow.js.map +0 -1
- package/esm/server.d.ts +0 -5
- package/esm/server.js +0 -62
- package/esm/server.js.map +0 -1
- package/esm/triggers/index.d.ts +0 -9
- package/esm/triggers/index.js +0 -6
- package/esm/triggers/index.js.map +0 -1
- package/esm/triggers/model.d.ts +0 -12
- package/esm/triggers/model.js +0 -47
- package/esm/triggers/model.js.map +0 -1
- package/lib/actions/flow_nodes.d.ts +0 -3
- package/lib/actions/flow_nodes.js +0 -163
- package/lib/actions/flow_nodes.js.map +0 -1
- package/lib/actions/index.js.map +0 -1
- package/lib/calculators/index.js.map +0 -1
- package/lib/collections/executions.js.map +0 -1
- package/lib/collections/flow_nodes.js.map +0 -1
- package/lib/collections/jobs.js.map +0 -1
- package/lib/collections/workflows.js.map +0 -1
- package/lib/constants.js.map +0 -1
- package/lib/index.js.map +0 -1
- package/lib/instructions/calculation.js.map +0 -1
- package/lib/instructions/condition.js.map +0 -1
- package/lib/instructions/create.js.map +0 -1
- package/lib/instructions/destroy.js.map +0 -1
- package/lib/instructions/index.js.map +0 -1
- package/lib/instructions/parallel.js.map +0 -1
- package/lib/instructions/prompt.js.map +0 -1
- package/lib/instructions/query.js.map +0 -1
- package/lib/instructions/update.js.map +0 -1
- package/lib/models/Execution.js.map +0 -1
- package/lib/models/FlowNode.js.map +0 -1
- package/lib/models/Job.js.map +0 -1
- package/lib/models/Workflow.js.map +0 -1
- package/lib/server.js.map +0 -1
- package/lib/triggers/index.js.map +0 -1
- package/lib/triggers/model.d.ts +0 -12
- package/lib/triggers/model.js +0 -49
- package/lib/triggers/model.js.map +0 -1
- package/tsconfig.build.json +0 -9
package/lib/actions/index.d.ts
CHANGED
package/lib/actions/index.js
CHANGED
|
@@ -1,30 +1,37 @@
|
|
|
1
1
|
"use strict";
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
}) : (function(o, m, k, k2) {
|
|
6
|
-
if (k2 === undefined) k2 = k;
|
|
7
|
-
o[k2] = m[k];
|
|
8
|
-
}));
|
|
9
|
-
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
10
|
-
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
11
|
-
}) : function(o, v) {
|
|
12
|
-
o["default"] = v;
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
13
5
|
});
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
};
|
|
21
|
-
|
|
22
|
-
|
|
6
|
+
exports.default = _default;
|
|
7
|
+
|
|
8
|
+
var workflows = _interopRequireWildcard(require("./workflows"));
|
|
9
|
+
|
|
10
|
+
var nodes = _interopRequireWildcard(require("./nodes"));
|
|
11
|
+
|
|
12
|
+
function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function _getRequireWildcardCache(nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
|
|
13
|
+
|
|
14
|
+
function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
|
|
15
|
+
|
|
16
|
+
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
|
|
17
|
+
|
|
18
|
+
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
|
|
19
|
+
|
|
20
|
+
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
|
|
21
|
+
|
|
23
22
|
function make(name, mod) {
|
|
24
|
-
|
|
23
|
+
return Object.keys(mod).reduce((result, key) => _objectSpread(_objectSpread({}, result), {}, {
|
|
24
|
+
[`${name}:${key}`]: mod[key]
|
|
25
|
+
}), {});
|
|
25
26
|
}
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
27
|
+
|
|
28
|
+
function _default({
|
|
29
|
+
app
|
|
30
|
+
}) {
|
|
31
|
+
app.actions(_objectSpread(_objectSpread(_objectSpread({}, make('workflows', workflows)), make('workflows.nodes', {
|
|
32
|
+
create: nodes.create,
|
|
33
|
+
destroy: nodes.destroy
|
|
34
|
+
})), make('flow_nodes', {
|
|
35
|
+
update: nodes.update
|
|
36
|
+
})));
|
|
37
|
+
}
|
|
@@ -1,3 +1,4 @@
|
|
|
1
1
|
import { Context } from '@nocobase/actions';
|
|
2
2
|
export declare function create(context: Context, next: any): Promise<void>;
|
|
3
3
|
export declare function destroy(context: Context, next: any): Promise<void>;
|
|
4
|
+
export declare function update(context: Context, next: any): Promise<void>;
|
|
@@ -0,0 +1,321 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
exports.create = create;
|
|
7
|
+
exports.destroy = destroy;
|
|
8
|
+
exports.update = update;
|
|
9
|
+
|
|
10
|
+
function _sequelize() {
|
|
11
|
+
const data = require("sequelize");
|
|
12
|
+
|
|
13
|
+
_sequelize = function _sequelize() {
|
|
14
|
+
return data;
|
|
15
|
+
};
|
|
16
|
+
|
|
17
|
+
return data;
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
function _actions() {
|
|
21
|
+
const data = require("@nocobase/actions");
|
|
22
|
+
|
|
23
|
+
_actions = function _actions() {
|
|
24
|
+
return data;
|
|
25
|
+
};
|
|
26
|
+
|
|
27
|
+
return data;
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _unsupportedIterableToArray(arr, i) || _nonIterableRest(); }
|
|
31
|
+
|
|
32
|
+
function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
|
|
33
|
+
|
|
34
|
+
function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
|
|
35
|
+
|
|
36
|
+
function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; return arr2; }
|
|
37
|
+
|
|
38
|
+
function _iterableToArrayLimit(arr, i) { var _i = arr == null ? null : typeof Symbol !== "undefined" && arr[Symbol.iterator] || arr["@@iterator"]; if (_i == null) return; var _arr = []; var _n = true; var _d = false; var _s, _e; try { for (_i = _i.call(arr); !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
|
|
39
|
+
|
|
40
|
+
function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
|
|
41
|
+
|
|
42
|
+
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
|
|
43
|
+
|
|
44
|
+
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
|
|
45
|
+
|
|
46
|
+
function create(_x, _x2) {
|
|
47
|
+
return _create.apply(this, arguments);
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
function _create() {
|
|
51
|
+
_create = _asyncToGenerator(function* (context, next) {
|
|
52
|
+
const db = context.db;
|
|
53
|
+
|
|
54
|
+
const repository = _actions().utils.getRepositoryFromParams(context);
|
|
55
|
+
|
|
56
|
+
const _context$action$param = context.action.params,
|
|
57
|
+
whitelist = _context$action$param.whitelist,
|
|
58
|
+
blacklist = _context$action$param.blacklist,
|
|
59
|
+
updateAssociationValues = _context$action$param.updateAssociationValues,
|
|
60
|
+
values = _context$action$param.values,
|
|
61
|
+
workflowId = _context$action$param.associatedIndex;
|
|
62
|
+
context.body = yield db.sequelize.transaction( /*#__PURE__*/function () {
|
|
63
|
+
var _ref = _asyncToGenerator(function* (transaction) {
|
|
64
|
+
const workflow = yield repository.getSourceModel(transaction);
|
|
65
|
+
|
|
66
|
+
if (workflow.executed) {
|
|
67
|
+
context.throw(400, 'Node could not be created in executed workflow');
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
const instance = yield repository.create({
|
|
71
|
+
values,
|
|
72
|
+
whitelist,
|
|
73
|
+
blacklist,
|
|
74
|
+
updateAssociationValues,
|
|
75
|
+
context,
|
|
76
|
+
transaction
|
|
77
|
+
});
|
|
78
|
+
|
|
79
|
+
if (!instance.upstreamId) {
|
|
80
|
+
const previousHead = yield repository.findOne({
|
|
81
|
+
filter: {
|
|
82
|
+
id: {
|
|
83
|
+
$ne: instance.id
|
|
84
|
+
},
|
|
85
|
+
upstreamId: null
|
|
86
|
+
},
|
|
87
|
+
transaction
|
|
88
|
+
});
|
|
89
|
+
|
|
90
|
+
if (previousHead) {
|
|
91
|
+
yield previousHead.setUpstream(instance, {
|
|
92
|
+
transaction
|
|
93
|
+
});
|
|
94
|
+
yield instance.setDownstream(previousHead, {
|
|
95
|
+
transaction
|
|
96
|
+
});
|
|
97
|
+
instance.set('downstream', previousHead);
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
return instance;
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
const upstream = yield instance.getUpstream({
|
|
104
|
+
transaction
|
|
105
|
+
});
|
|
106
|
+
|
|
107
|
+
if (instance.branchIndex == null) {
|
|
108
|
+
const downstream = yield upstream.getDownstream({
|
|
109
|
+
transaction
|
|
110
|
+
});
|
|
111
|
+
|
|
112
|
+
if (downstream) {
|
|
113
|
+
yield downstream.setUpstream(instance, {
|
|
114
|
+
transaction
|
|
115
|
+
});
|
|
116
|
+
yield instance.setDownstream(downstream, {
|
|
117
|
+
transaction
|
|
118
|
+
});
|
|
119
|
+
instance.set('downstream', downstream);
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
yield upstream.update({
|
|
123
|
+
downstreamId: instance.id
|
|
124
|
+
}, {
|
|
125
|
+
transaction
|
|
126
|
+
});
|
|
127
|
+
upstream.set('downstream', instance);
|
|
128
|
+
} else {
|
|
129
|
+
const _yield$upstream$getBr = yield upstream.getBranches({
|
|
130
|
+
where: {
|
|
131
|
+
id: {
|
|
132
|
+
[_sequelize().Op.ne]: instance.id
|
|
133
|
+
},
|
|
134
|
+
branchIndex: instance.branchIndex
|
|
135
|
+
},
|
|
136
|
+
transaction
|
|
137
|
+
}),
|
|
138
|
+
_yield$upstream$getBr2 = _slicedToArray(_yield$upstream$getBr, 1),
|
|
139
|
+
downstream = _yield$upstream$getBr2[0];
|
|
140
|
+
|
|
141
|
+
if (downstream) {
|
|
142
|
+
yield downstream.update({
|
|
143
|
+
upstreamId: instance.id,
|
|
144
|
+
branchIndex: null
|
|
145
|
+
}, {
|
|
146
|
+
transaction
|
|
147
|
+
});
|
|
148
|
+
yield instance.setDownstream(downstream, {
|
|
149
|
+
transaction
|
|
150
|
+
});
|
|
151
|
+
instance.set('downstream', downstream);
|
|
152
|
+
}
|
|
153
|
+
}
|
|
154
|
+
|
|
155
|
+
instance.set('upstream', upstream);
|
|
156
|
+
});
|
|
157
|
+
|
|
158
|
+
return function (_x7) {
|
|
159
|
+
return _ref.apply(this, arguments);
|
|
160
|
+
};
|
|
161
|
+
}());
|
|
162
|
+
yield next();
|
|
163
|
+
});
|
|
164
|
+
return _create.apply(this, arguments);
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
function searchBranchNodes(nodes, from) {
|
|
168
|
+
const branchHeads = nodes.filter(item => item.upstreamId === from.id && item.branchIndex != null);
|
|
169
|
+
return branchHeads.reduce((flatten, head) => flatten.concat(searchBranchDownstreams(nodes, head)), []);
|
|
170
|
+
}
|
|
171
|
+
|
|
172
|
+
function searchBranchDownstreams(nodes, from) {
|
|
173
|
+
let result = [];
|
|
174
|
+
|
|
175
|
+
for (let search = from; search; search = search.downstream) {
|
|
176
|
+
result = [...result, search, ...searchBranchNodes(nodes, search)];
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
return result;
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
function destroy(_x3, _x4) {
|
|
183
|
+
return _destroy.apply(this, arguments);
|
|
184
|
+
}
|
|
185
|
+
|
|
186
|
+
function _destroy() {
|
|
187
|
+
_destroy = _asyncToGenerator(function* (context, next) {
|
|
188
|
+
const db = context.db;
|
|
189
|
+
|
|
190
|
+
const repository = _actions().utils.getRepositoryFromParams(context);
|
|
191
|
+
|
|
192
|
+
const filterByTk = context.action.params.filterByTk;
|
|
193
|
+
context.body = yield db.sequelize.transaction( /*#__PURE__*/function () {
|
|
194
|
+
var _ref2 = _asyncToGenerator(function* (transaction) {
|
|
195
|
+
const workflow = yield repository.getSourceModel(transaction);
|
|
196
|
+
|
|
197
|
+
if (workflow.executed) {
|
|
198
|
+
context.throw(400, 'Nodes in executed workflow could not be deleted');
|
|
199
|
+
}
|
|
200
|
+
|
|
201
|
+
const fields = ['id', 'upstreamId', 'downstreamId', 'branchIndex'];
|
|
202
|
+
const instance = yield repository.findOne({
|
|
203
|
+
filterByTk,
|
|
204
|
+
fields: [...fields, 'workflowId'],
|
|
205
|
+
appends: ['upstream', 'downstream'],
|
|
206
|
+
transaction
|
|
207
|
+
});
|
|
208
|
+
|
|
209
|
+
const _instance$get = instance.get(),
|
|
210
|
+
upstream = _instance$get.upstream,
|
|
211
|
+
downstream = _instance$get.downstream;
|
|
212
|
+
|
|
213
|
+
if (upstream && upstream.downstreamId === instance.id) {
|
|
214
|
+
yield upstream.update({
|
|
215
|
+
downstreamId: instance.downstreamId
|
|
216
|
+
}, {
|
|
217
|
+
transaction
|
|
218
|
+
});
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
if (downstream) {
|
|
222
|
+
yield downstream.update({
|
|
223
|
+
upstreamId: instance.upstreamId,
|
|
224
|
+
branchIndex: instance.branchIndex
|
|
225
|
+
}, {
|
|
226
|
+
transaction
|
|
227
|
+
});
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
const nodes = yield repository.find({
|
|
231
|
+
filter: {
|
|
232
|
+
workflowId: instance.workflowId
|
|
233
|
+
},
|
|
234
|
+
fields,
|
|
235
|
+
transaction
|
|
236
|
+
});
|
|
237
|
+
const nodesMap = new Map(); // make map
|
|
238
|
+
|
|
239
|
+
nodes.forEach(item => {
|
|
240
|
+
nodesMap.set(item.id, item);
|
|
241
|
+
}); // overwrite
|
|
242
|
+
|
|
243
|
+
nodesMap.set(instance.id, instance); // make linked list
|
|
244
|
+
|
|
245
|
+
nodes.forEach(item => {
|
|
246
|
+
if (item.upstreamId) {
|
|
247
|
+
item.upstream = nodesMap.get(item.upstreamId);
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
if (item.downstreamId) {
|
|
251
|
+
item.downstream = nodesMap.get(item.downstreamId);
|
|
252
|
+
}
|
|
253
|
+
});
|
|
254
|
+
const branchNodes = searchBranchNodes(nodes, instance);
|
|
255
|
+
yield repository.destroy({
|
|
256
|
+
filterByTk: [instance.id, ...branchNodes.map(item => item.id)],
|
|
257
|
+
transaction
|
|
258
|
+
});
|
|
259
|
+
return instance;
|
|
260
|
+
});
|
|
261
|
+
|
|
262
|
+
return function (_x8) {
|
|
263
|
+
return _ref2.apply(this, arguments);
|
|
264
|
+
};
|
|
265
|
+
}());
|
|
266
|
+
yield next();
|
|
267
|
+
});
|
|
268
|
+
return _destroy.apply(this, arguments);
|
|
269
|
+
}
|
|
270
|
+
|
|
271
|
+
function update(_x5, _x6) {
|
|
272
|
+
return _update.apply(this, arguments);
|
|
273
|
+
}
|
|
274
|
+
|
|
275
|
+
function _update() {
|
|
276
|
+
_update = _asyncToGenerator(function* (context, next) {
|
|
277
|
+
const db = context.db;
|
|
278
|
+
|
|
279
|
+
const repository = _actions().utils.getRepositoryFromParams(context);
|
|
280
|
+
|
|
281
|
+
const _context$action$param2 = context.action.params,
|
|
282
|
+
filterByTk = _context$action$param2.filterByTk,
|
|
283
|
+
values = _context$action$param2.values,
|
|
284
|
+
whitelist = _context$action$param2.whitelist,
|
|
285
|
+
blacklist = _context$action$param2.blacklist,
|
|
286
|
+
filter = _context$action$param2.filter,
|
|
287
|
+
updateAssociationValues = _context$action$param2.updateAssociationValues;
|
|
288
|
+
context.body = yield db.sequelize.transaction( /*#__PURE__*/function () {
|
|
289
|
+
var _ref3 = _asyncToGenerator(function* (transaction) {
|
|
290
|
+
// TODO(optimize): duplicated instance query
|
|
291
|
+
const _yield$repository$fin = yield repository.findOne({
|
|
292
|
+
filterByTk,
|
|
293
|
+
appends: ['workflow.executed'],
|
|
294
|
+
transaction
|
|
295
|
+
}),
|
|
296
|
+
workflow = _yield$repository$fin.workflow;
|
|
297
|
+
|
|
298
|
+
if (workflow.executed) {
|
|
299
|
+
context.throw(400, 'Nodes in executed workflow could not be reconfigured');
|
|
300
|
+
}
|
|
301
|
+
|
|
302
|
+
return repository.update({
|
|
303
|
+
filterByTk,
|
|
304
|
+
values,
|
|
305
|
+
whitelist,
|
|
306
|
+
blacklist,
|
|
307
|
+
filter,
|
|
308
|
+
updateAssociationValues,
|
|
309
|
+
context,
|
|
310
|
+
transaction
|
|
311
|
+
});
|
|
312
|
+
});
|
|
313
|
+
|
|
314
|
+
return function (_x9) {
|
|
315
|
+
return _ref3.apply(this, arguments);
|
|
316
|
+
};
|
|
317
|
+
}());
|
|
318
|
+
yield next();
|
|
319
|
+
});
|
|
320
|
+
return _update.apply(this, arguments);
|
|
321
|
+
}
|
|
@@ -0,0 +1,271 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, "__esModule", {
|
|
4
|
+
value: true
|
|
5
|
+
});
|
|
6
|
+
exports.duplicate = duplicate;
|
|
7
|
+
exports.update = update;
|
|
8
|
+
|
|
9
|
+
function _actions() {
|
|
10
|
+
const data = require("@nocobase/actions");
|
|
11
|
+
|
|
12
|
+
_actions = function _actions() {
|
|
13
|
+
return data;
|
|
14
|
+
};
|
|
15
|
+
|
|
16
|
+
return data;
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
function _database() {
|
|
20
|
+
const data = require("@nocobase/database");
|
|
21
|
+
|
|
22
|
+
_database = function _database() {
|
|
23
|
+
return data;
|
|
24
|
+
};
|
|
25
|
+
|
|
26
|
+
return data;
|
|
27
|
+
}
|
|
28
|
+
|
|
29
|
+
function _slicedToArray(arr, i) { return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _unsupportedIterableToArray(arr, i) || _nonIterableRest(); }
|
|
30
|
+
|
|
31
|
+
function _nonIterableRest() { throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
|
|
32
|
+
|
|
33
|
+
function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
|
|
34
|
+
|
|
35
|
+
function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; return arr2; }
|
|
36
|
+
|
|
37
|
+
function _iterableToArrayLimit(arr, i) { var _i = arr == null ? null : typeof Symbol !== "undefined" && arr[Symbol.iterator] || arr["@@iterator"]; if (_i == null) return; var _arr = []; var _n = true; var _d = false; var _s, _e; try { for (_i = _i.call(arr); !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"] != null) _i["return"](); } finally { if (_d) throw _e; } } return _arr; }
|
|
38
|
+
|
|
39
|
+
function _arrayWithHoles(arr) { if (Array.isArray(arr)) return arr; }
|
|
40
|
+
|
|
41
|
+
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
|
|
42
|
+
|
|
43
|
+
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
|
|
44
|
+
|
|
45
|
+
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
|
|
46
|
+
|
|
47
|
+
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
|
|
48
|
+
|
|
49
|
+
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
|
|
50
|
+
|
|
51
|
+
function _asyncIterator(iterable) { var method, async, sync, retry = 2; for ("undefined" != typeof Symbol && (async = Symbol.asyncIterator, sync = Symbol.iterator); retry--;) { if (async && null != (method = iterable[async])) return method.call(iterable); if (sync && null != (method = iterable[sync])) return new AsyncFromSyncIterator(method.call(iterable)); async = "@@asyncIterator", sync = "@@iterator"; } throw new TypeError("Object is not async iterable"); }
|
|
52
|
+
|
|
53
|
+
function AsyncFromSyncIterator(s) { function AsyncFromSyncIteratorContinuation(r) { if (Object(r) !== r) return Promise.reject(new TypeError(r + " is not an object.")); var done = r.done; return Promise.resolve(r.value).then(function (value) { return { value: value, done: done }; }); } return AsyncFromSyncIterator = function AsyncFromSyncIterator(s) { this.s = s, this.n = s.next; }, AsyncFromSyncIterator.prototype = { s: null, n: null, next: function next() { return AsyncFromSyncIteratorContinuation(this.n.apply(this.s, arguments)); }, return: function _return(value) { var ret = this.s.return; return void 0 === ret ? Promise.resolve({ value: value, done: !0 }) : AsyncFromSyncIteratorContinuation(ret.apply(this.s, arguments)); }, throw: function _throw(value) { var thr = this.s.return; return void 0 === thr ? Promise.reject(value) : AsyncFromSyncIteratorContinuation(thr.apply(this.s, arguments)); } }, new AsyncFromSyncIterator(s); }
|
|
54
|
+
|
|
55
|
+
function update(_x, _x2) {
|
|
56
|
+
return _update.apply(this, arguments);
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
function _update() {
|
|
60
|
+
_update = _asyncToGenerator(function* (context, next) {
|
|
61
|
+
const db = context.db;
|
|
62
|
+
|
|
63
|
+
const repository = _actions().utils.getRepositoryFromParams(context);
|
|
64
|
+
|
|
65
|
+
const _context$action$param = context.action.params,
|
|
66
|
+
filterByTk = _context$action$param.filterByTk,
|
|
67
|
+
values = _context$action$param.values,
|
|
68
|
+
whitelist = _context$action$param.whitelist,
|
|
69
|
+
blacklist = _context$action$param.blacklist,
|
|
70
|
+
filter = _context$action$param.filter,
|
|
71
|
+
updateAssociationValues = _context$action$param.updateAssociationValues;
|
|
72
|
+
context.body = yield db.sequelize.transaction( /*#__PURE__*/function () {
|
|
73
|
+
var _ref = _asyncToGenerator(function* (transaction) {
|
|
74
|
+
const others = {};
|
|
75
|
+
|
|
76
|
+
if (values.enabled) {
|
|
77
|
+
values.current = true;
|
|
78
|
+
others.enabled = false;
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
if (values.current) {
|
|
82
|
+
others.current = false;
|
|
83
|
+
yield repository.update({
|
|
84
|
+
filter: {
|
|
85
|
+
key: values.key,
|
|
86
|
+
id: {
|
|
87
|
+
[_database().Op.ne]: filterByTk
|
|
88
|
+
}
|
|
89
|
+
},
|
|
90
|
+
values: others,
|
|
91
|
+
context,
|
|
92
|
+
transaction
|
|
93
|
+
});
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
const instance = yield repository.update({
|
|
97
|
+
filterByTk,
|
|
98
|
+
values,
|
|
99
|
+
whitelist,
|
|
100
|
+
blacklist,
|
|
101
|
+
filter,
|
|
102
|
+
updateAssociationValues,
|
|
103
|
+
context,
|
|
104
|
+
transaction
|
|
105
|
+
});
|
|
106
|
+
return instance;
|
|
107
|
+
});
|
|
108
|
+
|
|
109
|
+
return function (_x5) {
|
|
110
|
+
return _ref.apply(this, arguments);
|
|
111
|
+
};
|
|
112
|
+
}());
|
|
113
|
+
yield next();
|
|
114
|
+
});
|
|
115
|
+
return _update.apply(this, arguments);
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
function typeOf(value) {
|
|
119
|
+
if (Array.isArray(value)) {
|
|
120
|
+
return 'array';
|
|
121
|
+
} else if (value instanceof Date) {
|
|
122
|
+
return 'date';
|
|
123
|
+
} else if (value === null) {
|
|
124
|
+
return 'null';
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
return typeof value;
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
function migrateConfig(config, oldToNew) {
|
|
131
|
+
function migrate(value) {
|
|
132
|
+
switch (typeOf(value)) {
|
|
133
|
+
case 'object':
|
|
134
|
+
return Object.keys(value).reduce((result, key) => _objectSpread(_objectSpread({}, result), {}, {
|
|
135
|
+
[key]: migrate(value[key])
|
|
136
|
+
}), {});
|
|
137
|
+
|
|
138
|
+
case 'array':
|
|
139
|
+
return value.map(item => migrate(item));
|
|
140
|
+
|
|
141
|
+
case 'string':
|
|
142
|
+
return value.replace(/(\{\{\$jobsMapByNodeId\.)(\d+)/, (_, prefix, id) => `${prefix}${oldToNew.get(Number.parseInt(id, 10)).id}`);
|
|
143
|
+
|
|
144
|
+
default:
|
|
145
|
+
return value;
|
|
146
|
+
}
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
return migrate(config);
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
/**
 * Workflow "duplicate" action entry point.
 *
 * Thin forwarding wrapper produced by Babel's async-function hoisting
 * pattern: it lets `duplicate` be referenced before the transpiled
 * implementation (`_duplicate`) is assigned below.
 *
 * @param {object} context - action context (Koa-style)
 * @param {Function} next - next middleware
 */
function duplicate(context, next) {
  return _duplicate.apply(this, arguments);
}
|
|
155
|
+
|
|
156
|
+
// Lazily-initialized transpiled implementation of the `duplicate` action:
// copies a workflow record together with all of its nodes inside a single
// database transaction, then rewires the copied nodes' upstream/downstream
// links and config references to point at the new node ids.
// (Babel output — first call replaces `_duplicate` with the compiled
// async function and re-invokes it.)
function _duplicate() {
  _duplicate = _asyncToGenerator(function* (context, next) {
    const db = context.db;

    const repository = _actions().utils.getRepositoryFromParams(context);

    const filterByTk = context.action.params.filterByTk;
    // Everything below runs in one managed transaction; the created
    // workflow instance becomes the response body.
    context.body = yield db.sequelize.transaction( /*#__PURE__*/function () {
      var _ref2 = _asyncToGenerator(function* (transaction) {
        // Load the source workflow with its nodes eagerly appended.
        const origin = yield repository.findOne({
          filterByTk,
          appends: ['nodes'],
          context,
          transaction
        });
        // Create the copy. Note: only these five fields are carried over —
        // notably not `enabled`/`current`, so the copy presumably starts
        // inactive (NOTE(review): confirm against the workflows collection
        // defaults).
        const instance = yield repository.create({
          values: {
            key: origin.key,
            title: origin.title,
            description: origin.description,
            type: origin.type,
            config: origin.config
          },
          transaction
        });
        // Index the source nodes by id for the rewiring pass below.
        const originalNodesMap = new Map();
        origin.nodes.forEach(node => {
          originalNodesMap.set(node.id, node);
        });
        // oldToNew: source node id -> newly created node instance.
        // newToOld is populated but not read anywhere in this function.
        const oldToNew = new Map();
        const newToOld = new Map();
        // --- Pass 1: clone every node (Babel for-await-of expansion). ---
        var _iteratorAbruptCompletion = false;
        var _didIteratorError = false;

        var _iteratorError;

        try {
          for (var _iterator = _asyncIterator(origin.nodes), _step; _iteratorAbruptCompletion = !(_step = yield _iterator.next()).done; _iteratorAbruptCompletion = false) {
            const node = _step.value;
            // upstream/downstream links are intentionally NOT copied here;
            // they are fixed up in pass 2 once all new ids exist.
            const newNode = yield instance.createNode({
              type: node.type,
              config: node.config,
              title: node.title,
              branchIndex: node.branchIndex
            }, {
              transaction
            }); // NOTE: keep original node references for later replacement

            oldToNew.set(node.id, newNode);
            newToOld.set(newNode.id, node);
          }
        } catch (err) {
          _didIteratorError = true;
          _iteratorError = err;
        } finally {
          try {
            if (_iteratorAbruptCompletion && _iterator.return != null) {
              yield _iterator.return();
            }
          } finally {
            if (_didIteratorError) {
              throw _iteratorError;
            }
          }
        }

        // --- Pass 2: rewire each clone's links and config references. ---
        var _iteratorAbruptCompletion2 = false;
        var _didIteratorError2 = false;

        var _iteratorError2;

        try {
          for (var _iterator2 = _asyncIterator(oldToNew.entries()), _step2; _iteratorAbruptCompletion2 = !(_step2 = yield _iterator2.next()).done; _iteratorAbruptCompletion2 = false) {
            var _newUpstream$id, _newDownstream$id;

            const _step2$value = _slicedToArray(_step2.value, 2),
                  oldId = _step2$value[0],
                  newNode = _step2$value[1];

            const oldNode = originalNodesMap.get(oldId);
            // Map the source node's neighbours to their clones (null when
            // the source node had no neighbour on that side).
            const newUpstream = oldNode.upstreamId ? oldToNew.get(oldNode.upstreamId) : null;
            const newDownstream = oldNode.downstreamId ? oldToNew.get(oldNode.downstreamId) : null;
            // The `?? null` expansions below normalize a missing clone to
            // null rather than undefined before persisting.
            yield newNode.update({
              upstreamId: (_newUpstream$id = newUpstream === null || newUpstream === void 0 ? void 0 : newUpstream.id) !== null && _newUpstream$id !== void 0 ? _newUpstream$id : null,
              downstreamId: (_newDownstream$id = newDownstream === null || newDownstream === void 0 ? void 0 : newDownstream.id) !== null && _newDownstream$id !== void 0 ? _newDownstream$id : null,
              // Rewrite `{{$jobsMapByNodeId.<oldId>}}` references in the
              // config to the new node ids.
              config: migrateConfig(oldNode.config, oldToNew)
            }, {
              transaction
            });
          }
        } catch (err) {
          _didIteratorError2 = true;
          _iteratorError2 = err;
        } finally {
          try {
            if (_iteratorAbruptCompletion2 && _iterator2.return != null) {
              yield _iterator2.return();
            }
          } finally {
            if (_didIteratorError2) {
              throw _iteratorError2;
            }
          }
        }

        return instance;
      });

      return function (_x6) {
        return _ref2.apply(this, arguments);
      };
    }());
    yield next();
  });
  return _duplicate.apply(this, arguments);
}
|