@nocobase/plugin-workflow 1.7.0-beta.9 → 1.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client/3c67def1831d0b23.js +10 -0
- package/dist/client/WorkflowCategoryTabs.d.ts +10 -0
- package/dist/client/WorkflowCollectionsProvider.d.ts +10 -0
- package/dist/client/WorkflowTasks.d.ts +12 -3
- package/dist/client/a4111333ce86663c.js +10 -0
- package/dist/client/a54c8c009524b6da.js +10 -0
- package/dist/client/components/EmunerationField.d.ts +9 -0
- package/dist/client/f9eeb46e5c6a9ffe.js +10 -0
- package/dist/client/index.d.ts +1 -0
- package/dist/client/index.js +1 -1
- package/dist/client/locale/index.d.ts +2 -1
- package/dist/client/schemas/executions.d.ts +126 -125
- package/dist/client/triggers/schedule/RepeatField.d.ts +2 -1
- package/dist/common/collections/executions.d.ts +136 -0
- package/dist/common/collections/executions.js +125 -0
- package/dist/common/collections/flow_nodes.d.ts +65 -0
- package/dist/common/collections/flow_nodes.js +94 -0
- package/dist/common/collections/jobs.d.ts +37 -0
- package/dist/common/collections/jobs.js +74 -0
- package/dist/common/collections/userWorkflowTasks.d.ts +37 -0
- package/dist/common/collections/userWorkflowTasks.js +65 -0
- package/dist/common/collections/workflowCategories.d.ts +65 -0
- package/dist/common/collections/workflowCategories.js +68 -0
- package/dist/common/collections/workflowCategoryRelations.d.ts +21 -0
- package/dist/common/collections/workflowCategoryRelations.js +51 -0
- package/dist/common/collections/workflowStats.d.ts +37 -0
- package/dist/common/collections/workflowStats.js +59 -0
- package/dist/common/collections/workflowTasks.d.ts +10 -0
- package/dist/common/collections/workflowTasks.js +64 -0
- package/dist/common/collections/workflowVersionStats.d.ts +37 -0
- package/dist/common/collections/workflowVersionStats.js +59 -0
- package/dist/common/collections/workflows.d.ts +263 -0
- package/dist/common/collections/workflows.js +244 -0
- package/dist/common/constants.d.ts +9 -0
- package/dist/{server/actions/workflowTasks.js → common/constants.js} +6 -17
- package/dist/externalVersion.js +12 -11
- package/dist/locale/en-US.json +127 -15
- package/dist/locale/zh-CN.json +10 -1
- package/dist/node_modules/cron-parser/package.json +1 -1
- package/dist/node_modules/lru-cache/package.json +1 -1
- package/dist/node_modules/nodejs-snowflake/LICENSE +201 -0
- package/dist/node_modules/nodejs-snowflake/nodejs_snowflake.d.ts +62 -0
- package/dist/node_modules/nodejs-snowflake/nodejs_snowflake.js +1 -0
- package/dist/node_modules/nodejs-snowflake/nodejs_snowflake_bg.wasm +0 -0
- package/dist/node_modules/nodejs-snowflake/package.json +1 -0
- package/dist/server/Dispatcher.d.ts +11 -0
- package/dist/server/Dispatcher.js +35 -0
- package/dist/server/Plugin.d.ts +12 -2
- package/dist/server/Plugin.js +143 -105
- package/dist/server/Processor.d.ts +4 -11
- package/dist/server/Processor.js +50 -45
- package/dist/server/actions/index.js +2 -2
- package/dist/server/actions/nodes.js +7 -5
- package/dist/server/actions/{workflowTasks.d.ts → userWorkflowTasks.d.ts} +1 -1
- package/dist/server/actions/userWorkflowTasks.js +54 -0
- package/dist/server/actions/workflows.js +6 -3
- package/dist/server/collections/executions.js +12 -44
- package/dist/server/collections/flow_nodes.js +12 -57
- package/dist/server/collections/jobs.js +12 -36
- package/dist/server/collections/userWorkflowTasks.d.ts +11 -0
- package/dist/server/collections/userWorkflowTasks.js +43 -0
- package/dist/server/collections/workflowCategories.d.ts +11 -0
- package/dist/server/collections/workflowCategories.js +43 -0
- package/dist/server/collections/workflowCategoryRelations.d.ts +11 -0
- package/dist/server/collections/workflowCategoryRelations.js +43 -0
- package/dist/server/collections/workflowStats.d.ts +11 -0
- package/dist/server/collections/workflowStats.js +43 -0
- package/dist/server/collections/workflowTasks.d.ts +2 -1
- package/dist/server/collections/workflowTasks.js +12 -33
- package/dist/server/collections/workflowVersionStats.d.ts +11 -0
- package/dist/server/collections/workflowVersionStats.js +43 -0
- package/dist/server/collections/workflows.d.ts +2 -1
- package/dist/server/collections/workflows.js +12 -101
- package/dist/server/migrations/20250320223415-stats.d.ts +14 -0
- package/dist/server/migrations/20250320223415-stats.js +82 -0
- package/dist/server/migrations/20250409164913-remove-jobs-auto-increment.d.ts +14 -0
- package/dist/server/migrations/20250409164913-remove-jobs-auto-increment.js +57 -0
- package/dist/server/repositories/WorkflowRepository.js +3 -2
- package/dist/server/triggers/CollectionTrigger.js +3 -2
- package/dist/server/triggers/ScheduleTrigger/DateFieldScheduleTrigger.d.ts +5 -3
- package/dist/server/triggers/ScheduleTrigger/DateFieldScheduleTrigger.js +39 -36
- package/dist/server/triggers/ScheduleTrigger/StaticScheduleTrigger.d.ts +4 -2
- package/dist/server/triggers/ScheduleTrigger/StaticScheduleTrigger.js +26 -24
- package/dist/server/triggers/ScheduleTrigger/index.d.ts +2 -1
- package/dist/server/triggers/ScheduleTrigger/index.js +4 -8
- package/dist/server/triggers/index.d.ts +1 -1
- package/dist/server/types/Workflow.d.ts +0 -2
- package/dist/swagger/index.d.ts +0 -14
- package/dist/swagger/index.js +0 -14
- package/package.json +6 -4
- package/dist/client/739d458621edf81f.js +0 -10
- package/dist/client/8e96ce6ed324ce69.js +0 -10
- package/dist/client/c107ec5004b8644b.js +0 -10
- package/dist/client/c3f36ae11fcc489e.js +0 -10
- package/dist/client/nodes/output.d.ts +0 -31
package/dist/server/Plugin.d.ts
CHANGED
@@ -6,6 +6,7 @@
  * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
  * For more information, please refer to: https://www.nocobase.com/agreement.
  */
+import { Snowflake } from 'nodejs-snowflake';
 import { Transactionable } from 'sequelize';
 import { Plugin } from '@nocobase/server';
 import { Registry } from '@nocobase/utils';
@@ -14,7 +15,7 @@ import Processor from './Processor';
 import { CustomFunction } from './functions';
 import Trigger from './triggers';
 import { InstructionInterface } from './instructions';
-import type { ExecutionModel, WorkflowModel
+import type { ExecutionModel, WorkflowModel } from './types';
 type ID = number | string;
 export type EventOptions = {
     eventKey?: string;
@@ -31,6 +32,7 @@ export default class PluginWorkflowServer extends Plugin {
     triggers: Registry<Trigger>;
     functions: Registry<CustomFunction>;
     enabledCache: Map<number, WorkflowModel>;
+    snowflake: Snowflake;
     private ready;
     private executing;
     private pending;
@@ -40,6 +42,11 @@ export default class PluginWorkflowServer extends Plugin {
     private meter;
     private checker;
     private onBeforeSave;
+    private onAfterCreate;
+    private onAfterUpdate;
+    private onAfterDestroy;
+    private onAfterStart;
+    private onBeforeStop;
     handleSyncMessage(message: any): Promise<void>;
     /**
      * @experimental
@@ -91,6 +98,9 @@ export default class PluginWorkflowServer extends Plugin {
     /**
      * @experimental
      */
-
+    updateTasksStats(userId: number, type: string, stats: {
+        pending: number;
+        all: number;
+    }, { transaction }: Transactionable): Promise<void>;
 }
 export {};
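The declaration diff above adds two members to PluginWorkflowServer: a snowflake ID generator and a public updateTasksStats(userId, type, stats, { transaction }) method. A minimal sketch of calling the new method from another server-side plugin, assuming the workflow plugin is resolved through the plugin manager; the surrounding class, the 'workflow' lookup key, and the sample numbers are illustrative, not part of this diff:

import { Plugin } from '@nocobase/server';

export class MyTasksPlugin extends Plugin {
  async refreshTaskCounters(userId: number) {
    // Assumed lookup; only the updateTasksStats signature comes from Plugin.d.ts above.
    const workflowPlugin = this.pm.get('workflow') as any;
    await this.db.sequelize.transaction(async (transaction) => {
      // stats payload matches the { pending, all } shape declared above
      await workflowPlugin.updateTasksStats(userId, 'manual', { pending: 3, all: 10 }, { transaction });
    });
  }
}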
package/dist/server/Plugin.js
CHANGED
@@ -41,6 +41,7 @@ __export(Plugin_exports, {
 module.exports = __toCommonJS(Plugin_exports);
 var import_path = __toESM(require("path"));
 var import_crypto = require("crypto");
+var import_nodejs_snowflake = require("nodejs-snowflake");
 var import_sequelize = require("sequelize");
 var import_lru_cache = __toESM(require("lru-cache"));
 var import_database = require("@nocobase/database");
@@ -60,12 +61,12 @@ var import_DestroyInstruction = __toESM(require("./instructions/DestroyInstructi
 var import_QueryInstruction = __toESM(require("./instructions/QueryInstruction"));
 var import_UpdateInstruction = __toESM(require("./instructions/UpdateInstruction"));
 var import_WorkflowRepository = __toESM(require("./repositories/WorkflowRepository"));
-var import_WorkflowTasksRepository = __toESM(require("./repositories/WorkflowTasksRepository"));
 class PluginWorkflowServer extends import_server.Plugin {
   instructions = new import_utils.Registry();
   triggers = new import_utils.Registry();
   functions = new import_utils.Registry();
   enabledCache = /* @__PURE__ */ new Map();
+  snowflake;
   ready = false;
   executing = null;
   pending = [];
@@ -74,8 +75,14 @@ class PluginWorkflowServer extends import_server.Plugin {
   loggerCache;
   meter = null;
   checker = null;
-  onBeforeSave = async (instance, { transaction }) => {
+  onBeforeSave = async (instance, { transaction, cycling }) => {
+    if (cycling) {
+      return;
+    }
     const Model = instance.constructor;
+    if (!instance.key) {
+      instance.set("key", (0, import_utils.uid)());
+    }
     if (instance.enabled) {
       instance.set("current", true);
     }
@@ -91,18 +98,94 @@ class PluginWorkflowServer extends import_server.Plugin {
     });
     if (!previous) {
       instance.set("current", true);
-    }
-    if (instance.current && previous) {
+    } else if (instance.current) {
       await previous.update(
         { enabled: false, current: null },
         {
           transaction,
-
+          cycling: true
         }
       );
       this.toggle(previous, false, { transaction });
     }
   };
+  onAfterCreate = async (model, { transaction }) => {
+    const WorkflowStatsModel = this.db.getModel("workflowStats");
+    let stats = await WorkflowStatsModel.findOne({
+      where: { key: model.key },
+      transaction
+    });
+    if (!stats) {
+      stats = await model.createStats({ executed: 0 }, { transaction });
+    }
+    model.stats = stats;
+    model.versionStats = await model.createVersionStats({ id: model.id }, { transaction });
+    if (model.enabled) {
+      this.toggle(model, true, { transaction });
+    }
+  };
+  onAfterUpdate = async (model, { transaction }) => {
+    model.stats = await model.getStats({ transaction });
+    model.versionStats = await model.getVersionStats({ transaction });
+    this.toggle(model, model.enabled, { transaction });
+  };
+  onAfterDestroy = async (model, { transaction }) => {
+    this.toggle(model, false, { transaction });
+    const TaskRepo = this.db.getRepository("workflowTasks");
+    await TaskRepo.destroy({
+      filter: {
+        workflowId: model.id
+      },
+      transaction
+    });
+  };
+  // [Life Cycle]:
+  // * load all workflows in db
+  // * add all hooks for enabled workflows
+  // * add hooks for create/update[enabled]/delete workflow to add/remove specific hooks
+  onAfterStart = async () => {
+    this.ready = true;
+    const collection = this.db.getCollection("workflows");
+    const workflows = await collection.repository.find({
+      filter: { enabled: true },
+      appends: ["stats", "versionStats"]
+    });
+    for (const workflow of workflows) {
+      if (!workflow.stats) {
+        workflow.stats = await workflow.createStats({ executed: 0 });
+      }
+      if (!workflow.versionStats) {
+        workflow.versionStats = await workflow.createVersionStats({ executed: 0 });
+      }
+      this.toggle(workflow, true, { silent: true });
+    }
+    this.checker = setInterval(() => {
+      this.getLogger("dispatcher").info(`(cycling) check for queueing executions`);
+      this.dispatch();
+    }, 3e5);
+    this.app.on("workflow:dispatch", () => {
+      this.app.logger.info("workflow:dispatch");
+      this.dispatch();
+    });
+    this.getLogger("dispatcher").info("(starting) check for queueing executions");
+    this.dispatch();
+    this.ready = true;
+  };
+  onBeforeStop = async () => {
+    for (const workflow of this.enabledCache.values()) {
+      this.toggle(workflow, false, { silent: true });
+    }
+    this.ready = false;
+    if (this.events.length) {
+      await this.prepare();
+    }
+    if (this.executing) {
+      await this.executing;
+    }
+    if (this.checker) {
+      clearInterval(this.checker);
+    }
+  };
   async handleSyncMessage(message) {
     if (message.type === "statusChange") {
       if (message.enabled) {
@@ -193,8 +276,14 @@ class PluginWorkflowServer extends import_server.Plugin {
   }
   async beforeLoad() {
     this.db.registerRepositories({
-      WorkflowRepository: import_WorkflowRepository.default
-
+      WorkflowRepository: import_WorkflowRepository.default
+    });
+    const PluginRepo = this.db.getRepository("applicationPlugins");
+    const pluginRecord = await PluginRepo.findOne({
+      filter: { name: this.name }
+    });
+    this.snowflake = new import_nodejs_snowflake.Snowflake({
+      custom_epoch: pluginRecord == null ? void 0 : pluginRecord.createdAt.getTime()
     });
   }
   /**
@@ -230,77 +319,22 @@ class PluginWorkflowServer extends import_server.Plugin {
         "flow_nodes:update",
         "flow_nodes:destroy",
         "flow_nodes:test",
-        "jobs:get"
+        "jobs:get",
+        "workflowCategories:*"
       ]
     });
     this.app.acl.registerSnippet({
       name: "ui.workflows",
       actions: ["workflows:list"]
     });
-    this.app.acl.allow("
+    this.app.acl.allow("userWorkflowTasks", "listMine", "loggedIn");
     this.app.acl.allow("*", ["trigger"], "loggedIn");
-    this.db.addMigrations({
-      namespace: this.name,
-      directory: import_path.default.resolve(__dirname, "migrations"),
-      context: {
-        plugin: this
-      }
-    });
     db.on("workflows.beforeSave", this.onBeforeSave);
-    db.on("workflows.afterCreate",
-
-
-
-
-    db.on(
-      "workflows.afterUpdate",
-      (model, { transaction }) => this.toggle(model, model.enabled, { transaction })
-    );
-    db.on("workflows.afterDestroy", async (model, { transaction }) => {
-      this.toggle(model, false, { transaction });
-      const TaskRepo = this.db.getRepository("workflowTasks");
-      await TaskRepo.destroy({
-        filter: {
-          workflowId: model.id
-        },
-        transaction
-      });
-    });
-    this.app.on("afterStart", async () => {
-      this.ready = true;
-      const collection = db.getCollection("workflows");
-      const workflows = await collection.repository.find({
-        filter: { enabled: true }
-      });
-      workflows.forEach((workflow) => {
-        this.toggle(workflow, true, { silent: true });
-      });
-      this.checker = setInterval(() => {
-        this.getLogger("dispatcher").info(`(cycling) check for queueing executions`);
-        this.dispatch();
-      }, 3e5);
-      this.app.on("workflow:dispatch", () => {
-        this.app.logger.info("workflow:dispatch");
-        this.dispatch();
-      });
-      this.getLogger("dispatcher").info("(starting) check for queueing executions");
-      this.dispatch();
-    });
-    this.app.on("beforeStop", async () => {
-      for (const workflow of this.enabledCache.values()) {
-        this.toggle(workflow, false, { silent: true });
-      }
-      this.ready = false;
-      if (this.events.length) {
-        await this.prepare();
-      }
-      if (this.executing) {
-        await this.executing;
-      }
-      if (this.checker) {
-        clearInterval(this.checker);
-      }
-    });
+    db.on("workflows.afterCreate", this.onAfterCreate);
+    db.on("workflows.afterUpdate", this.onAfterUpdate);
+    db.on("workflows.afterDestroy", this.onAfterDestroy);
+    this.app.on("afterStart", this.onAfterStart);
+    this.app.on("beforeStop", this.onBeforeStop);
   }
   toggle(workflow, enable, { silent, transaction } = {}) {
     const type = workflow.get("type");
@@ -314,11 +348,14 @@ class PluginWorkflowServer extends import_server.Plugin {
       const prev = workflow.previous();
       if (prev.config) {
         trigger.off({ ...workflow.get(), ...prev });
+        this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id} based on configuration before updated`);
       }
       trigger.on(workflow);
+      this.getLogger(workflow.id).info(`toggle ON workflow ${workflow.id}`);
       this.enabledCache.set(workflow.id, workflow);
     } else {
       trigger.off(workflow);
+      this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id}`);
       this.enabledCache.delete(workflow.id);
     }
     if (!silent) {
@@ -470,21 +507,20 @@ class PluginWorkflowServer extends import_server.Plugin {
       throw err;
     }
     this.getLogger(workflow.id).info(`execution of workflow ${workflow.id} created as ${execution.id}`);
-
+    if (!workflow.stats) {
+      workflow.stats = await workflow.getStats({ transaction });
+    }
+    await workflow.stats.increment("executed", { transaction });
     if (this.db.options.dialect !== "postgres") {
-      await workflow.reload({ transaction });
+      await workflow.stats.reload({ transaction });
+    }
+    if (!workflow.versionStats) {
+      workflow.versionStats = await workflow.getVersionStats({ transaction });
+    }
+    await workflow.versionStats.increment("executed", { transaction });
+    if (this.db.options.dialect !== "postgres") {
+      await workflow.versionStats.reload({ transaction });
     }
-    await workflow.constructor.update(
-      {
-        allExecuted: workflow.allExecuted
-      },
-      {
-        where: {
-          key: workflow.key
-        },
-        transaction
-      }
-    );
     if (!sameTransaction) {
       await transaction.commit();
     }
@@ -638,36 +674,38 @@ class PluginWorkflowServer extends import_server.Plugin {
   /**
    * @experimental
    */
-  async
+  async updateTasksStats(userId, type, stats = { pending: 0, all: 0 }, { transaction }) {
     const { db } = this.app;
-    const repository = db.getRepository("
-
-
-
-
-
+    const repository = db.getRepository("userWorkflowTasks");
+    let record = await repository.findOne({
+      filter: {
+        userId,
+        type
+      },
+      transaction
+    });
+    if (record) {
+      await record.update(
+        {
+          stats
         },
-        transaction
-
+        { transaction }
+      );
     } else {
-      await repository.
-
-
+      record = await repository.create({
+        values: {
+          userId,
+          type,
+          stats
+        },
         transaction
       });
     }
-    if (
-    const counts = await repository.countAll({
-      where: {
-        userId: task.userId,
-        workflowId: { [import_database.Op.ne]: null }
-      },
-      transaction
-    }) || [];
+    if (userId) {
      this.app.emit("ws:sendToTag", {
        tagKey: "userId",
-        tagValue: `${
-        message: { type: "workflow:tasks:updated", payload:
+        tagValue: `${userId}`,
+        message: { type: "workflow:tasks:updated", payload: record.get() }
      });
    }
  }
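Together with the 20250409164913-remove-jobs-auto-increment migration listed above, Plugin.js now seeds a nodejs-snowflake generator with a custom epoch taken from the plugin's applicationPlugins record, so job IDs are produced in-process instead of by database auto-increment. A minimal sketch of that ID scheme; the epoch value here is illustrative, the plugin derives it from the record's createdAt:

import { Snowflake } from 'nodejs-snowflake';

// custom_epoch is the reference time the time-ordered 64-bit IDs count from.
const snowflake = new Snowflake({ custom_epoch: Date.UTC(2024, 0, 1) });

// getUniqueID() returns a bigint; Processor.saveJob() stores it as a string.
const jobId = snowflake.getUniqueID().toString();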
package/dist/server/Processor.d.ts
CHANGED
@@ -44,16 +44,9 @@ export default class Processor {
      * @experimental
      */
     nodesMap: Map<number, FlowNodeModel>;
-
-
-
-    jobsMap: Map<number, JobModel>;
-    /**
-     * @experimental
-     */
-    jobsMapByNodeKey: {
-        [key: string]: any;
-    };
+    private jobsMapByNodeKey;
+    private jobResultsMapByNodeKey;
+    private jobsToSave;
     /**
      * @experimental
      */
@@ -72,7 +65,7 @@ export default class Processor {
     /**
      * @experimental
      */
-    saveJob(payload: JobModel | Record<string, any>):
+    saveJob(payload: JobModel | Record<string, any>): JobModel;
     /**
      * @experimental
      */
package/dist/server/Processor.js
CHANGED
@@ -78,14 +78,9 @@ class Processor {
    * @experimental
    */
   nodesMap = /* @__PURE__ */ new Map();
-  /**
-   * @experimental
-   */
-  jobsMap = /* @__PURE__ */ new Map();
-  /**
-   * @experimental
-   */
   jobsMapByNodeKey = {};
+  jobResultsMapByNodeKey = {};
+  jobsToSave = /* @__PURE__ */ new Map();
   /**
    * @experimental
    */
@@ -107,9 +102,9 @@ class Processor {
   }
   makeJobs(jobs) {
     jobs.forEach((job) => {
-      this.jobsMap.set(job.id, job);
       const node = this.nodesMap.get(job.nodeId);
-      this.jobsMapByNodeKey[node.key] = job
+      this.jobsMapByNodeKey[node.key] = job;
+      this.jobResultsMapByNodeKey[node.key] = job.result;
     });
   }
   async prepare() {
@@ -123,11 +118,13 @@ class Processor {
       execution.workflow = plugin.enabledCache.get(execution.workflowId) || await execution.getWorkflow({ transaction });
     }
     const nodes = await execution.workflow.getNodes({ transaction });
+    execution.workflow.nodes = nodes;
     this.makeNodes(nodes);
     const jobs = await execution.getJobs({
       order: [["id", "ASC"]],
       transaction
     });
+    execution.jobs = jobs;
     this.makeJobs(jobs);
   }
   async start() {
@@ -179,11 +176,10 @@ class Processor {
       }
     }
     if (!(job instanceof import_database.Model)) {
-      job.upstreamId = prevJob instanceof import_database.Model ? prevJob.get("id") : null;
       job.nodeId = node.id;
       job.nodeKey = node.key;
     }
-    const savedJob =
+    const savedJob = this.saveJob(job);
     this.logger.info(
       `execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id}) finished as status: ${savedJob.status}`
     );
@@ -230,6 +226,30 @@ class Processor {
     return this.exec(instruction.resume.bind(instruction), node, job);
   }
   async exit(s) {
+    if (this.jobsToSave.size) {
+      const newJobs = [];
+      for (const job of this.jobsToSave.values()) {
+        if (job.isNewRecord) {
+          newJobs.push(job);
+        } else {
+          await job.save({ transaction: this.mainTransaction });
+        }
+      }
+      if (newJobs.length) {
+        const JobsModel = this.options.plugin.db.getModel("jobs");
+        await JobsModel.bulkCreate(
+          newJobs.map((job) => job.toJSON()),
+          {
+            transaction: this.mainTransaction,
+            returning: false
+          }
+        );
+        for (const job of newJobs) {
+          job.isNewRecord = false;
+        }
+      }
+      this.jobsToSave.clear();
+    }
     if (typeof s === "number") {
       const status = this.constructor.StatusMap[s] ?? Math.sign(s);
       await this.execution.update({ status }, { transaction: this.mainTransaction });
@@ -240,32 +260,29 @@ class Processor {
     this.logger.info(`execution (${this.execution.id}) exiting with status ${this.execution.status}`);
     return null;
   }
-  // TODO(optimize)
   /**
    * @experimental
    */
-
+  saveJob(payload) {
     const { database } = this.execution.constructor;
-    const { mainTransaction: transaction } = this;
     const { model } = database.getCollection("jobs");
     let job;
     if (payload instanceof model) {
-      job =
-
-      job = await model.findByPk(payload.id, { transaction });
-      await job.update(payload, { transaction });
+      job = payload;
+      job.set("updatedAt", /* @__PURE__ */ new Date());
     } else {
-      job =
-
-
-
-
-
-      );
+      job = model.build({
+        ...payload,
+        id: this.options.plugin.snowflake.getUniqueID().toString(),
+        createdAt: /* @__PURE__ */ new Date(),
+        updatedAt: /* @__PURE__ */ new Date(),
+        executionId: this.execution.id
+      });
     }
-    this.
+    this.jobsToSave.set(job.id, job);
     this.lastSavedJob = job;
-    this.jobsMapByNodeKey[job.nodeKey] = job
+    this.jobsMapByNodeKey[job.nodeKey] = job;
+    this.jobResultsMapByNodeKey[job.nodeKey] = job.result;
     return job;
   }
   /**
@@ -319,31 +336,19 @@ class Processor {
    * @experimental
    */
   findBranchParentJob(job, node) {
-
-      if (j.nodeId === node.id) {
-        return j;
-      }
-    }
-    return null;
+    return this.jobsMapByNodeKey[node.key];
   }
   /**
    * @experimental
   */
   findBranchLastJob(node, job) {
-    const allJobs =
+    const allJobs = Object.values(this.jobsMapByNodeKey);
     const branchJobs = [];
     for (let n = this.findBranchEndNode(node); n && n !== node.upstream; n = n.upstream) {
       branchJobs.push(...allJobs.filter((item) => item.nodeId === n.id));
     }
-    branchJobs.sort((a, b) => a.
-
-    for (let j = branchJobs[i]; j && j.id !== job.id; j = this.jobsMap.get(j.upstreamId)) {
-      if (j.upstreamId === job.id) {
-        return branchJobs[i];
-      }
-    }
-    return null;
+    branchJobs.sort((a, b) => a.updatedAt.getTime() - b.updatedAt.getTime());
+    return branchJobs[branchJobs.length - 1] || null;
   }
   /**
   * @experimental
@@ -362,12 +367,12 @@ class Processor {
     for (let n = includeSelfScope ? node : this.findBranchParentNode(node); n; n = this.findBranchParentNode(n)) {
       const instruction = this.options.plugin.instructions.get(n.type);
       if (typeof (instruction == null ? void 0 : instruction.getScope) === "function") {
-        $scopes[n.id] = $scopes[n.key] = instruction.getScope(n, this.
+        $scopes[n.id] = $scopes[n.key] = instruction.getScope(n, this.jobResultsMapByNodeKey[n.key], this);
       }
     }
     return {
       $context: this.execution.context,
-      $jobsMapByNodeKey: this.
+      $jobsMapByNodeKey: this.jobResultsMapByNodeKey,
       $system: systemFns,
       $scopes,
      $env: this.options.plugin.app.environment.getVariables()
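The Processor changes above stop writing each job to the database as it is produced: saveJob() now only updates in-memory maps and a jobsToSave buffer, which exit() flushes. Existing rows are saved one by one and brand-new rows go through a single bulkCreate. A minimal sketch of that flush pattern, with an illustrative job shape and persistence callback rather than the plugin's actual Sequelize models:

type BufferedJob = {
  id: string;
  isNewRecord: boolean;
  save(): Promise<void>;
  toJSON(): Record<string, any>;
};

async function flushJobs(
  jobsToSave: Map<string, BufferedJob>,
  bulkCreate: (rows: Record<string, any>[]) => Promise<void>,
): Promise<void> {
  const newJobs: BufferedJob[] = [];
  for (const job of jobsToSave.values()) {
    if (job.isNewRecord) {
      newJobs.push(job); // defer inserts so they can be written in one batch
    } else {
      await job.save(); // already-persisted jobs are updated individually
    }
  }
  if (newJobs.length) {
    await bulkCreate(newJobs.map((job) => job.toJSON()));
    newJobs.forEach((job) => (job.isNewRecord = false));
  }
  jobsToSave.clear();
}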
package/dist/server/actions/index.js
CHANGED
@@ -42,7 +42,7 @@ module.exports = __toCommonJS(actions_exports);
 var workflows = __toESM(require("./workflows"));
 var nodes = __toESM(require("./nodes"));
 var executions = __toESM(require("./executions"));
-var
+var userWorkflowTasks = __toESM(require("./userWorkflowTasks"));
 function make(name, mod) {
   return Object.keys(mod).reduce(
     (result, key) => ({
@@ -64,6 +64,6 @@ function actions_default({ app }) {
       test: nodes.test
     }),
     ...make("executions", executions),
-    ...make("
+    ...make("userWorkflowTasks", userWorkflowTasks)
   });
 }
package/dist/server/actions/nodes.js
CHANGED
@@ -51,7 +51,9 @@ async function create(context, next) {
   const { whitelist, blacklist, updateAssociationValues, values, associatedIndex: workflowId } = context.action.params;
   context.body = await db.sequelize.transaction(async (transaction) => {
     const workflow = await repository.getSourceModel(transaction);
-
+    workflow.versionStats = await workflow.getVersionStats({ transaction });
+    const { executed } = workflow.versionStats;
+    if (executed) {
       context.throw(400, "Node could not be created in executed workflow");
     }
     const instance = await repository.create({
@@ -143,9 +145,9 @@ async function destroy(context, next) {
   const instance = await repository.findOne({
     filterByTk,
     fields: [...fields, "workflowId"],
-    appends: ["upstream", "downstream", "workflow"]
+    appends: ["upstream", "downstream", "workflow.versionStats.executed"]
   });
-  if (instance.workflow.executed) {
+  if (instance.workflow.versionStats.executed) {
     context.throw(400, "Nodes in executed workflow could not be deleted");
   }
   await db.sequelize.transaction(async (transaction) => {
@@ -202,10 +204,10 @@ async function update(context, next) {
   context.body = await db.sequelize.transaction(async (transaction) => {
     const { workflow } = await repository.findOne({
       filterByTk,
-      appends: ["workflow.executed"],
+      appends: ["workflow.versionStats.executed"],
       transaction
     });
-    if (workflow.executed) {
+    if (workflow.versionStats.executed) {
       context.throw(400, "Nodes in executed workflow could not be reconfigured");
     }
     return repository.update({
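The node actions above move their guard from a workflow.executed column to the executed counter on the associated workflowVersionStats record, loaded through the association path. A minimal sketch of how the adjusted check reads; the repository lookup and context names are illustrative:

// Illustrative guard mirroring the change above: read "executed" from the
// associated versionStats record instead of a column on workflows.
const node = await db.getRepository('flow_nodes').findOne({
  filterByTk,
  appends: ['workflow.versionStats.executed'],
});
if (node.workflow.versionStats.executed) {
  context.throw(400, 'Nodes in executed workflow could not be reconfigured');
}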
package/dist/server/actions/userWorkflowTasks.d.ts
CHANGED
@@ -7,4 +7,4 @@
  * For more information, please refer to: https://www.nocobase.com/agreement.
  */
 import { Context } from '@nocobase/actions';
-export declare function
+export declare function listMine(context: Context, next: any): Promise<void>;