@nocobase/plugin-workflow 1.7.0-alpha.10 → 1.7.0-alpha.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client/3d24e559cfbba5d8.js +10 -0
- package/dist/client/90a4f6d29bd453a7.js +10 -0
- package/dist/client/WorkflowTasks.d.ts +11 -3
- package/dist/client/a2fc280565d1c746.js +10 -0
- package/dist/client/f4370978c40502d7.js +10 -0
- package/dist/client/index.d.ts +1 -0
- package/dist/client/index.js +1 -1
- package/dist/client/triggers/schedule/RepeatField.d.ts +2 -1
- package/dist/common/collections/jobs.d.ts +11 -0
- package/dist/common/collections/jobs.js +7 -0
- package/dist/externalVersion.js +10 -10
- package/dist/node_modules/cron-parser/package.json +1 -1
- package/dist/node_modules/lru-cache/package.json +1 -1
- package/dist/node_modules/nodejs-snowflake/LICENSE +201 -0
- package/dist/node_modules/nodejs-snowflake/nodejs_snowflake.d.ts +62 -0
- package/dist/node_modules/nodejs-snowflake/nodejs_snowflake.js +1 -0
- package/dist/node_modules/nodejs-snowflake/nodejs_snowflake_bg.wasm +0 -0
- package/dist/node_modules/nodejs-snowflake/package.json +1 -0
- package/dist/server/Dispatcher.d.ts +11 -0
- package/dist/server/Dispatcher.js +35 -0
- package/dist/server/Plugin.d.ts +8 -1
- package/dist/server/Plugin.js +101 -71
- package/dist/server/Processor.d.ts +4 -11
- package/dist/server/Processor.js +48 -45
- package/dist/server/migrations/20250320223415-stats.js +14 -7
- package/dist/server/migrations/20250409164913-remove-jobs-auto-increment.d.ts +14 -0
- package/dist/server/migrations/20250409164913-remove-jobs-auto-increment.js +57 -0
- package/dist/server/triggers/ScheduleTrigger/DateFieldScheduleTrigger.d.ts +4 -2
- package/dist/server/triggers/ScheduleTrigger/DateFieldScheduleTrigger.js +33 -30
- package/dist/server/triggers/ScheduleTrigger/StaticScheduleTrigger.d.ts +3 -1
- package/dist/server/triggers/ScheduleTrigger/StaticScheduleTrigger.js +23 -21
- package/dist/server/triggers/ScheduleTrigger/index.d.ts +2 -1
- package/dist/server/triggers/ScheduleTrigger/index.js +4 -8
- package/dist/server/triggers/index.d.ts +1 -1
- package/package.json +4 -3
- package/dist/client/27734873a939f873.js +0 -10
- package/dist/client/42a0063e6e3f5ab2.js +0 -10
- package/dist/client/c8053d382167e05f.js +0 -10
- package/dist/client/f5735dd4ea5f2180.js +0 -10
package/dist/server/Plugin.js
CHANGED
@@ -41,6 +41,7 @@ __export(Plugin_exports, {
 module.exports = __toCommonJS(Plugin_exports);
 var import_path = __toESM(require("path"));
 var import_crypto = require("crypto");
+var import_nodejs_snowflake = require("nodejs-snowflake");
 var import_sequelize = require("sequelize");
 var import_lru_cache = __toESM(require("lru-cache"));
 var import_database = require("@nocobase/database");
@@ -66,6 +67,7 @@ class PluginWorkflowServer extends import_server.Plugin {
   triggers = new import_utils.Registry();
   functions = new import_utils.Registry();
   enabledCache = /* @__PURE__ */ new Map();
+  snowflake;
   ready = false;
   executing = null;
   pending = [];
@@ -106,6 +108,82 @@ class PluginWorkflowServer extends import_server.Plugin {
       this.toggle(previous, false, { transaction });
     }
   };
+  onAfterCreate = async (model, { transaction }) => {
+    const WorkflowStatsModel = this.db.getModel("workflowStats");
+    let stats = await WorkflowStatsModel.findOne({
+      where: { key: model.key },
+      transaction
+    });
+    if (!stats) {
+      stats = await model.createStats({ executed: 0 }, { transaction });
+    }
+    model.stats = stats;
+    model.versionStats = await model.createVersionStats({ id: model.id }, { transaction });
+    if (model.enabled) {
+      this.toggle(model, true, { transaction });
+    }
+  };
+  onAfterUpdate = async (model, { transaction }) => {
+    model.stats = await model.getStats({ transaction });
+    model.versionStats = await model.getVersionStats({ transaction });
+    this.toggle(model, model.enabled, { transaction });
+  };
+  onAfterDestroy = async (model, { transaction }) => {
+    this.toggle(model, false, { transaction });
+    const TaskRepo = this.db.getRepository("workflowTasks");
+    await TaskRepo.destroy({
+      filter: {
+        workflowId: model.id
+      },
+      transaction
+    });
+  };
+  // [Life Cycle]:
+  // * load all workflows in db
+  // * add all hooks for enabled workflows
+  // * add hooks for create/update[enabled]/delete workflow to add/remove specific hooks
+  onAfterStart = async () => {
+    this.ready = true;
+    const collection = this.db.getCollection("workflows");
+    const workflows = await collection.repository.find({
+      filter: { enabled: true },
+      appends: ["stats", "versionStats"]
+    });
+    for (const workflow of workflows) {
+      if (!workflow.stats) {
+        workflow.stats = await workflow.createStats({ executed: 0 });
+      }
+      if (!workflow.versionStats) {
+        workflow.versionStats = await workflow.createVersionStats({ executed: 0 });
+      }
+      this.toggle(workflow, true, { silent: true });
+    }
+    this.checker = setInterval(() => {
+      this.getLogger("dispatcher").info(`(cycling) check for queueing executions`);
+      this.dispatch();
+    }, 3e5);
+    this.app.on("workflow:dispatch", () => {
+      this.app.logger.info("workflow:dispatch");
+      this.dispatch();
+    });
+    this.getLogger("dispatcher").info("(starting) check for queueing executions");
+    this.dispatch();
+  };
+  onBeforeStop = async () => {
+    for (const workflow of this.enabledCache.values()) {
+      this.toggle(workflow, false, { silent: true });
+    }
+    this.ready = false;
+    if (this.events.length) {
+      await this.prepare();
+    }
+    if (this.executing) {
+      await this.executing;
+    }
+    if (this.checker) {
+      clearInterval(this.checker);
+    }
+  };
   async handleSyncMessage(message) {
     if (message.type === "statusChange") {
       if (message.enabled) {
@@ -199,6 +277,13 @@ class PluginWorkflowServer extends import_server.Plugin {
       WorkflowRepository: import_WorkflowRepository.default,
       WorkflowTasksRepository: import_WorkflowTasksRepository.default
     });
+    const PluginRepo = this.db.getRepository("applicationPlugins");
+    const pluginRecord = await PluginRepo.findOne({
+      filter: { name: this.name }
+    });
+    this.snowflake = new import_nodejs_snowflake.Snowflake({
+      custom_epoch: pluginRecord == null ? void 0 : pluginRecord.createdAt.getTime()
+    });
   }
   /**
    * @internal
@@ -250,69 +335,11 @@ class PluginWorkflowServer extends import_server.Plugin {
       }
     });
     db.on("workflows.beforeSave", this.onBeforeSave);
-    db.on("workflows.afterCreate", …
-      …
-      …
-      …
-      …
-        transaction
-      });
-      model.stats = stats;
-      model.versionStats = await model.createVersionStats({ id: model.id }, { transaction });
-      if (model.enabled) {
-        this.toggle(model, true, { transaction });
-      }
-    });
-    db.on("workflows.afterUpdate", async (model, { transaction }) => {
-      model.stats = await model.getStats({ transaction });
-      model.versionStats = await model.getVersionStats({ transaction });
-      this.toggle(model, model.enabled, { transaction });
-    });
-    db.on("workflows.afterDestroy", async (model, { transaction }) => {
-      this.toggle(model, false, { transaction });
-      const TaskRepo = this.db.getRepository("workflowTasks");
-      await TaskRepo.destroy({
-        filter: {
-          workflowId: model.id
-        },
-        transaction
-      });
-    });
-    this.app.on("afterStart", async () => {
-      this.ready = true;
-      const collection = db.getCollection("workflows");
-      const workflows = await collection.repository.find({
-        filter: { enabled: true }
-      });
-      workflows.forEach((workflow) => {
-        this.toggle(workflow, true, { silent: true });
-      });
-      this.checker = setInterval(() => {
-        this.getLogger("dispatcher").info(`(cycling) check for queueing executions`);
-        this.dispatch();
-      }, 3e5);
-      this.app.on("workflow:dispatch", () => {
-        this.app.logger.info("workflow:dispatch");
-        this.dispatch();
-      });
-      this.getLogger("dispatcher").info("(starting) check for queueing executions");
-      this.dispatch();
-    });
-    this.app.on("beforeStop", async () => {
-      for (const workflow of this.enabledCache.values()) {
-        this.toggle(workflow, false, { silent: true });
-      }
-      this.ready = false;
-      if (this.events.length) {
-        await this.prepare();
-      }
-      if (this.executing) {
-        await this.executing;
-      }
-      if (this.checker) {
-        clearInterval(this.checker);
-      }
-    });
+    db.on("workflows.afterCreate", this.onAfterCreate);
+    db.on("workflows.afterUpdate", this.onAfterUpdate);
+    db.on("workflows.afterDestroy", this.onAfterDestroy);
+    this.app.on("afterStart", this.onAfterStart);
+    this.app.on("beforeStop", this.onBeforeStop);
   }
   toggle(workflow, enable, { silent, transaction } = {}) {
     const type = workflow.get("type");
@@ -326,11 +353,14 @@ class PluginWorkflowServer extends import_server.Plugin {
       const prev = workflow.previous();
       if (prev.config) {
         trigger.off({ ...workflow.get(), ...prev });
+        this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id} based on configuration before updated`);
       }
       trigger.on(workflow);
+      this.getLogger(workflow.id).info(`toggle ON workflow ${workflow.id}`);
       this.enabledCache.set(workflow.id, workflow);
     } else {
       trigger.off(workflow);
+      this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id}`);
       this.enabledCache.delete(workflow.id);
     }
     if (!silent) {
@@ -649,10 +679,16 @@ class PluginWorkflowServer extends import_server.Plugin {
   /**
    * @experimental
    */
-  async toggleTaskStatus(task, …
+  async toggleTaskStatus(task, on, { transaction }) {
     const { db } = this.app;
     const repository = db.getRepository("workflowTasks");
-    if (…
+    if (on) {
+      await repository.updateOrCreate({
+        filterKeys: ["key", "type"],
+        values: task,
+        transaction
+      });
+    } else {
       await repository.destroy({
         filter: {
           type: task.type,
@@ -660,12 +696,6 @@ class PluginWorkflowServer extends import_server.Plugin {
         },
         transaction
       });
-    } else {
-      await repository.updateOrCreate({
-        filterKeys: ["key", "type"],
-        values: task,
-        transaction
-      });
     }
     if (task.userId) {
       const counts = await repository.countAll({
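During initialization (the repositories hunk above), the plugin now reads its own row from `applicationPlugins` and constructs a `Snowflake` generator from the bundled `nodejs-snowflake` package, seeding `custom_epoch` with that record's `createdAt`; `Processor.saveJob` (further down in this diff) uses the generator to assign job IDs client-side instead of relying on database auto-increment. A minimal sketch of that usage, with a hard-coded epoch standing in for the plugin record:

```ts
// Minimal sketch of the ID generation this diff introduces — not the plugin's own class.
// Only the nodejs-snowflake API shown in the diff is used: new Snowflake({ custom_epoch }) and getUniqueID().
import { Snowflake } from 'nodejs-snowflake';

// The plugin derives custom_epoch from the applicationPlugins record's createdAt;
// the fixed timestamp below is an assumption for illustration only.
const generator = new Snowflake({ custom_epoch: Date.parse('2024-01-01T00:00:00Z') });

// getUniqueID() returns a bigint; the diff stores it on the job record as a string.
const jobId: string = generator.getUniqueID().toString();
console.log(jobId); // time-sortable and unique per process, with no database round trip
```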
package/dist/server/Processor.d.ts
CHANGED

@@ -44,16 +44,9 @@ export default class Processor {
     * @experimental
     */
    nodesMap: Map<number, FlowNodeModel>;
-    /**
-     * @experimental
-     */
-    jobsMap: Map<number, JobModel>;
-    /**
-     * @experimental
-     */
-    jobsMapByNodeKey: {
-        [key: string]: any;
-    };
+    private jobsMapByNodeKey;
+    private jobResultsMapByNodeKey;
+    private jobsToSave;
    /**
     * @experimental
     */
@@ -72,7 +65,7 @@ export default class Processor {
    /**
     * @experimental
     */
-    saveJob(payload: JobModel | Record<string, any>): …
+    saveJob(payload: JobModel | Record<string, any>): JobModel;
    /**
     * @experimental
     */
package/dist/server/Processor.js
CHANGED
@@ -78,14 +78,9 @@ class Processor {
   * @experimental
   */
  nodesMap = /* @__PURE__ */ new Map();
-  /**
-   * @experimental
-   */
-  jobsMap = /* @__PURE__ */ new Map();
-  /**
-   * @experimental
-   */
  jobsMapByNodeKey = {};
+  jobResultsMapByNodeKey = {};
+  jobsToSave = /* @__PURE__ */ new Map();
  /**
   * @experimental
   */
@@ -107,9 +102,9 @@ class Processor {
  }
  makeJobs(jobs) {
    jobs.forEach((job) => {
-      this.jobsMap.set(job.id, job);
      const node = this.nodesMap.get(job.nodeId);
-      this.jobsMapByNodeKey[node.key] = job
+      this.jobsMapByNodeKey[node.key] = job;
+      this.jobResultsMapByNodeKey[node.key] = job.result;
    });
  }
  async prepare() {
@@ -179,11 +174,10 @@ class Processor {
      }
    }
    if (!(job instanceof import_database.Model)) {
-      job.upstreamId = prevJob instanceof import_database.Model ? prevJob.get("id") : null;
      job.nodeId = node.id;
      job.nodeKey = node.key;
    }
-    const savedJob = …
+    const savedJob = this.saveJob(job);
    this.logger.info(
      `execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id}) finished as status: ${savedJob.status}`
    );
@@ -230,6 +224,30 @@ class Processor {
    return this.exec(instruction.resume.bind(instruction), node, job);
  }
  async exit(s) {
+    if (this.jobsToSave.size) {
+      const newJobs = [];
+      for (const job of this.jobsToSave.values()) {
+        if (job.isNewRecord) {
+          newJobs.push(job);
+        } else {
+          await job.save({ transaction: this.mainTransaction });
+        }
+      }
+      if (newJobs.length) {
+        const JobsModel = this.options.plugin.db.getModel("jobs");
+        await JobsModel.bulkCreate(
+          newJobs.map((job) => job.toJSON()),
+          {
+            transaction: this.mainTransaction,
+            returning: false
+          }
+        );
+        for (const job of newJobs) {
+          job.isNewRecord = false;
+        }
+      }
+      this.jobsToSave.clear();
+    }
    if (typeof s === "number") {
      const status = this.constructor.StatusMap[s] ?? Math.sign(s);
      await this.execution.update({ status }, { transaction: this.mainTransaction });
@@ -240,32 +258,29 @@ class Processor {
    this.logger.info(`execution (${this.execution.id}) exiting with status ${this.execution.status}`);
    return null;
  }
-  // TODO(optimize)
  /**
   * @experimental
   */
-  …
+  saveJob(payload) {
    const { database } = this.execution.constructor;
-    const { mainTransaction: transaction } = this;
    const { model } = database.getCollection("jobs");
    let job;
    if (payload instanceof model) {
-      job = …
-      …
-      job = await model.findByPk(payload.id, { transaction });
-      await job.update(payload, { transaction });
+      job = payload;
+      job.set("updatedAt", /* @__PURE__ */ new Date());
    } else {
-      job = …
-      …
-      …
-      …
-      …
-      …
-      );
+      job = model.build({
+        ...payload,
+        id: this.options.plugin.snowflake.getUniqueID().toString(),
+        createdAt: /* @__PURE__ */ new Date(),
+        updatedAt: /* @__PURE__ */ new Date(),
+        executionId: this.execution.id
+      });
    }
-    this.…
+    this.jobsToSave.set(job.id, job);
    this.lastSavedJob = job;
-    this.jobsMapByNodeKey[job.nodeKey] = job
+    this.jobsMapByNodeKey[job.nodeKey] = job;
+    this.jobResultsMapByNodeKey[job.nodeKey] = job.result;
    return job;
  }
  /**
@@ -319,31 +334,19 @@ class Processor {
   * @experimental
   */
  findBranchParentJob(job, node) {
-    …
-      if (j.nodeId === node.id) {
-        return j;
-      }
-    }
-    return null;
+    return this.jobsMapByNodeKey[node.key];
  }
  /**
   * @experimental
   */
  findBranchLastJob(node, job) {
-    const allJobs = …
+    const allJobs = Object.values(this.jobsMapByNodeKey);
    const branchJobs = [];
    for (let n = this.findBranchEndNode(node); n && n !== node.upstream; n = n.upstream) {
      branchJobs.push(...allJobs.filter((item) => item.nodeId === n.id));
    }
-    branchJobs.sort((a, b) => a.…
-    …
-      for (let j = branchJobs[i]; j && j.id !== job.id; j = this.jobsMap.get(j.upstreamId)) {
-        if (j.upstreamId === job.id) {
-          return branchJobs[i];
-        }
-      }
-    }
-    return null;
+    branchJobs.sort((a, b) => a.updatedAt.getTime() - b.updatedAt.getTime());
+    return branchJobs[branchJobs.length - 1] || null;
  }
  /**
   * @experimental
@@ -362,12 +365,12 @@ class Processor {
    for (let n = includeSelfScope ? node : this.findBranchParentNode(node); n; n = this.findBranchParentNode(n)) {
      const instruction = this.options.plugin.instructions.get(n.type);
      if (typeof (instruction == null ? void 0 : instruction.getScope) === "function") {
-        $scopes[n.id] = $scopes[n.key] = instruction.getScope(n, this.…
+        $scopes[n.id] = $scopes[n.key] = instruction.getScope(n, this.jobResultsMapByNodeKey[n.key], this);
      }
    }
    return {
      $context: this.execution.context,
-      $jobsMapByNodeKey: this.…
+      $jobsMapByNodeKey: this.jobResultsMapByNodeKey,
      $system: systemFns,
      $scopes,
      $env: this.options.plugin.app.environment.getVariables()
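Taken together with the new `jobsToSave` map, `saveJob` no longer awaits a database write per job: it mutates or builds job model instances in memory, and `exit()` flushes the buffer once, saving existing records individually and bulk-inserting new ones with `returning: false`. A hedged sketch of that collect-then-flush pattern; `BufferedJob` and the `bulkCreate` callback are illustrative stand-ins, not NocoBase types:

```ts
// Sketch of the buffered-save pattern used in exit(): collect jobs during the run, flush once at the end.
interface BufferedJob {
  id: string;
  isNewRecord: boolean;
  toJSON(): Record<string, unknown>;
  save(options: { transaction: unknown }): Promise<void>;
}

async function flushJobs(
  jobsToSave: Map<string, BufferedJob>,
  bulkCreate: (rows: Record<string, unknown>[], options: { transaction: unknown; returning: boolean }) => Promise<void>,
  transaction: unknown,
): Promise<void> {
  const newJobs: BufferedJob[] = [];
  for (const job of jobsToSave.values()) {
    if (job.isNewRecord) {
      newJobs.push(job); // batch all inserts together
    } else {
      await job.save({ transaction }); // updates still go one by one
    }
  }
  if (newJobs.length) {
    // returning: false skips fetching generated values — IDs were assigned up front.
    await bulkCreate(newJobs.map((j) => j.toJSON()), { transaction, returning: false });
    newJobs.forEach((j) => (j.isNewRecord = false));
  }
  jobsToSave.clear();
}
```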
package/dist/server/migrations/20250320223415-stats.js
CHANGED

@@ -45,16 +45,21 @@ class stats_default extends import_server.Migration {
       });
       const groupCounts = {};
       for (const workflow of workflows) {
-        await WorkflowVersionStatsModel.…
+        const versionStats = await WorkflowVersionStatsModel.findOne({
           where: {
             id: workflow.id
           },
-          defaults: {
-            id: workflow.id,
-            executed: workflow.get("executed")
-          },
           transaction
         });
+        if (!versionStats) {
+          await WorkflowVersionStatsModel.create(
+            {
+              id: workflow.id,
+              executed: workflow.get("executed")
+            },
+            { transaction }
+          );
+        }
         const key = workflow.get("key");
         groupCounts[key] = {
           key,
@@ -62,13 +67,15 @@ class stats_default extends import_server.Migration {
         };
       }
       for (const values of Object.values(groupCounts)) {
-        await WorkflowStatsModel.…
+        const stats = await WorkflowStatsModel.findOne({
           where: {
             key: values.key
           },
-          defaults: values,
           transaction
         });
+        if (!stats) {
+          await WorkflowStatsModel.create(values, { transaction });
+        }
       }
     });
   }
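The reworked migration above replaces the earlier call that passed a `defaults` object (find-or-create semantics) with an explicit `findOne` followed by a plain `create` only when the row is missing. A small sketch of that idiom, using an assumed `StatsLike` interface since the real models come from the NocoBase database layer:

```ts
// Hedged sketch of the find-then-create idiom the migration switches to; names are illustrative.
interface StatsLike {
  findOne(options: { where: Record<string, unknown>; transaction?: unknown }): Promise<object | null>;
  create(values: Record<string, unknown>, options: { transaction?: unknown }): Promise<object>;
}

async function ensureRow(
  model: StatsLike,
  where: Record<string, unknown>,
  defaults: Record<string, unknown>,
  transaction?: unknown,
): Promise<void> {
  const existing = await model.findOne({ where, transaction });
  if (!existing) {
    // Insert only when the row is genuinely missing; both statements run in the caller's transaction.
    await model.create({ ...where, ...defaults }, { transaction });
  }
}
```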
package/dist/server/migrations/20250409164913-remove-jobs-auto-increment.d.ts
ADDED

@@ -0,0 +1,14 @@
+/**
+ * This file is part of the NocoBase (R) project.
+ * Copyright (c) 2020-2024 NocoBase Co., Ltd.
+ * Authors: NocoBase Team.
+ *
+ * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
+ * For more information, please refer to: https://www.nocobase.com/agreement.
+ */
+import { Migration } from '@nocobase/server';
+export default class extends Migration {
+    appVersion: string;
+    on: string;
+    up(): Promise<void>;
+}
package/dist/server/migrations/20250409164913-remove-jobs-auto-increment.js
ADDED

@@ -0,0 +1,57 @@
+/**
+ * This file is part of the NocoBase (R) project.
+ * Copyright (c) 2020-2024 NocoBase Co., Ltd.
+ * Authors: NocoBase Team.
+ *
+ * This project is dual-licensed under AGPL-3.0 and NocoBase Commercial License.
+ * For more information, please refer to: https://www.nocobase.com/agreement.
+ */
+
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var remove_jobs_auto_increment_exports = {};
+__export(remove_jobs_auto_increment_exports, {
+  default: () => remove_jobs_auto_increment_default
+});
+module.exports = __toCommonJS(remove_jobs_auto_increment_exports);
+var import_server = require("@nocobase/server");
+class remove_jobs_auto_increment_default extends import_server.Migration {
+  appVersion = "<1.7.0";
+  on = "beforeLoad";
+  async up() {
+    const { db } = this.context;
+    const jobCollection = db.collection({
+      name: "jobs"
+    });
+    const tableNameWithQuotes = jobCollection.getRealTableName(true);
+    await db.sequelize.transaction(async (transaction) => {
+      if (this.db.isPostgresCompatibleDialect()) {
+        await db.sequelize.query(`ALTER TABLE ${tableNameWithQuotes} ALTER COLUMN id DROP DEFAULT`, {
+          transaction
+        });
+        return;
+      }
+      if (this.db.isMySQLCompatibleDialect()) {
+        await db.sequelize.query(`ALTER TABLE ${tableNameWithQuotes} MODIFY COLUMN id BIGINT`, {
+          transaction
+        });
+        return;
+      }
+    });
+  }
+}
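This new migration strips the auto-increment behaviour from `jobs.id` so the snowflake IDs generated by the plugin can be written verbatim: Postgres-compatible dialects drop the column default, MySQL-compatible dialects redefine the column as a plain BIGINT. A sketch of that dialect branch, where `quotedTable` and `query` stand in for the collection's quoted table name and `db.sequelize.query`:

```ts
// Hedged sketch of the dialect branch in the new migration; argument names are illustrative.
async function dropJobsIdDefault(
  dialect: 'postgres' | 'mysql',
  quotedTable: string,
  query: (sql: string) => Promise<unknown>,
): Promise<void> {
  if (dialect === 'postgres') {
    // Removes the serial/sequence default so explicitly supplied IDs are stored as-is.
    await query(`ALTER TABLE ${quotedTable} ALTER COLUMN id DROP DEFAULT`);
    return;
  }
  // MySQL has no "drop default" for AUTO_INCREMENT; redefining the column as plain BIGINT removes it.
  await query(`ALTER TABLE ${quotedTable} MODIFY COLUMN id BIGINT`);
}
```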
package/dist/server/triggers/ScheduleTrigger/DateFieldScheduleTrigger.d.ts
CHANGED

@@ -27,9 +27,11 @@ export default class DateFieldScheduleTrigger {
    private timer;
    private cache;
    cacheCycle: number;
+    onAfterStart: () => void;
+    onBeforeStop: () => void;
    constructor(workflow: Plugin);
-    reload(): …
-    inspect(…
+    reload(): void;
+    inspect(workflow: WorkflowModel): Promise<void>;
    loadRecordsToSchedule({ id, config: { collection, limit, startsOn, repeat, endsOn }, stats }: WorkflowModel, currentDate: Date): Promise<import("@nocobase/database").Model<any, any>[]>;
    getRecordNextTime(workflow: WorkflowModel, record: any, nextSecond?: boolean): any;
    schedule(workflow: WorkflowModel, record: any, nextTime: any, toggle?: boolean, options?: {}): Promise<void>;
package/dist/server/triggers/ScheduleTrigger/DateFieldScheduleTrigger.js
CHANGED

@@ -99,44 +99,49 @@ function getHookId(workflow, type) {
 class DateFieldScheduleTrigger {
   constructor(workflow) {
     this.workflow = workflow;
-    workflow.app.on("afterStart", …
-      …
-        return;
-      }
-      this.timer = setInterval(() => this.reload(), this.cacheCycle);
-      this.reload();
-    });
-    workflow.app.on("beforeStop", () => {
-      if (this.timer) {
-        clearInterval(this.timer);
-      }
-      for (const [key, timer] of this.cache.entries()) {
-        clearTimeout(timer);
-        this.cache.delete(key);
-      }
-    });
+    workflow.app.on("afterStart", this.onAfterStart);
+    workflow.app.on("beforeStop", this.onBeforeStop);
   }
   events = /* @__PURE__ */ new Map();
   timer = null;
   cache = /* @__PURE__ */ new Map();
   // caching workflows in range, default to 5min
   cacheCycle = 3e5;
-  …
+  onAfterStart = () => {
+    if (this.timer) {
+      return;
+    }
+    this.timer = setInterval(() => this.reload(), this.cacheCycle);
+    this.reload();
+  };
+  onBeforeStop = () => {
+    if (this.timer) {
+      clearInterval(this.timer);
+    }
+    for (const [key, timer] of this.cache.entries()) {
+      clearTimeout(timer);
+      this.cache.delete(key);
+    }
+  };
+  reload() {
+    for (const [key, timer] of this.cache.entries()) {
+      clearTimeout(timer);
+      this.cache.delete(key);
+    }
     const workflows = Array.from(this.workflow.enabledCache.values()).filter(
       (item) => item.type === "schedule" && item.config.mode === import_utils.SCHEDULE_MODE.DATE_FIELD
     );
-    …
-    …
+    workflows.forEach((workflow) => {
+      this.inspect(workflow);
+    });
   }
-  inspect(…
+  async inspect(workflow) {
     const now = /* @__PURE__ */ new Date();
-    …
-    …
-    …
-    …
-    …
-      this.schedule(workflow, record, nextTime, Boolean(nextTime));
-    });
+    const records = await this.loadRecordsToSchedule(workflow, now);
+    this.workflow.getLogger(workflow.id).info(`[Schedule on date field] ${records.length} records to schedule`);
+    records.forEach((record) => {
+      const nextTime = this.getRecordNextTime(workflow, record);
+      this.schedule(workflow, record, nextTime, Boolean(nextTime));
     });
   }
   // 1. startsOn in range -> yes
@@ -210,8 +215,6 @@ class DateFieldScheduleTrigger {
         [import_database.Op.gte]: new Date(endTimestamp)
       }
     });
-      } else {
-        this.workflow.getLogger(id).warn(`[Schedule on date field] "endsOn.field" is not configured`);
       }
     }
   }
@@ -333,7 +336,7 @@ class DateFieldScheduleTrigger {
     }
   }
   on(workflow) {
-    this.inspect(…
+    this.inspect(workflow);
     const { collection } = workflow.config;
     const [dataSourceName, collectionName] = (0, import_data_source_manager.parseCollectionName)(collection);
     const event = `${collectionName}.afterSaveWithAssociations`;
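Like the plugin class, both schedule triggers move their `afterStart`/`beforeStop` logic out of inline closures into named class-property handlers, so a single stable function reference is what gets registered on the app, and the handlers surface in the accompanying `.d.ts` declarations. A sketch of that pattern with an illustrative trigger class and a plain EventEmitter in place of the NocoBase app:

```ts
// Sketch of the named-handler pattern the triggers adopt; class, event names, and timings are illustrative.
import { EventEmitter } from 'node:events';

class SampleTrigger {
  private timer: NodeJS.Timeout | null = null;

  constructor(private app: EventEmitter, private cycleMs = 300_000) {
    // Registering class-property arrows (instead of inline closures) keeps a stable reference,
    // so app.off('afterStart', this.onAfterStart) could later remove exactly this listener.
    app.on('afterStart', this.onAfterStart);
    app.on('beforeStop', this.onBeforeStop);
  }

  onAfterStart = () => {
    if (this.timer) return; // idempotent: don't double-schedule on repeated starts
    this.timer = setInterval(() => this.reload(), this.cycleMs);
    this.reload();
  };

  onBeforeStop = () => {
    if (this.timer) clearInterval(this.timer);
    this.timer = null;
  };

  reload(): void {
    // re-inspect enabled workflows; omitted in this sketch
  }
}
```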
package/dist/server/triggers/ScheduleTrigger/StaticScheduleTrigger.d.ts
CHANGED

@@ -11,8 +11,10 @@ import { WorkflowModel } from '../../types';
 export default class StaticScheduleTrigger {
    workflow: Plugin;
    private timers;
+    onAfterStart: () => void;
+    onBeforeStop: () => void;
    constructor(workflow: Plugin);
-    inspect(…
+    inspect(workflow: WorkflowModel): void;
    getNextTime({ config, stats }: WorkflowModel, currentDate: Date, nextSecond?: boolean): number;
    schedule(workflow: WorkflowModel, nextTime: number, toggle?: boolean): void;
    trigger(workflow: WorkflowModel, time: number): Promise<void>;