@nocobase/plugin-workflow 0.19.0-alpha.1 → 0.19.0-alpha.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client/components/CheckboxGroupWithTooltip.d.ts +7 -0
- package/dist/client/components/{ExecutionStatusSelect.d.ts → ExecutionStatus.d.ts} +1 -0
- package/dist/client/components/index.d.ts +1 -0
- package/dist/client/index.d.ts +1 -0
- package/dist/client/index.js +35 -30
- package/dist/client/nodes/end.d.ts +22 -0
- package/dist/client/nodes/index.d.ts +3 -0
- package/dist/client/schemas/executions.d.ts +14 -1
- package/dist/client/triggers/index.d.ts +2 -1
- package/dist/client/triggers/schedule/constants.d.ts +1 -1
- package/dist/client/triggers/schedule/index.d.ts +1 -0
- package/dist/externalVersion.js +10 -10
- package/dist/locale/ko_KR.json +168 -0
- package/dist/locale/zh-CN.json +21 -5
- package/dist/node_modules/cron-parser/package.json +1 -1
- package/dist/node_modules/lru-cache/package.json +1 -1
- package/dist/server/Plugin.d.ts +5 -2
- package/dist/server/Plugin.js +81 -50
- package/dist/server/Processor.d.ts +4 -1
- package/dist/server/Processor.js +27 -15
- package/dist/server/actions/executions.d.ts +1 -0
- package/dist/server/actions/executions.js +38 -0
- package/dist/server/actions/workflows.js +3 -1
- package/dist/server/collections/jobs.js +4 -0
- package/dist/server/collections/workflows.js +5 -0
- package/dist/server/instructions/ConditionInstruction.d.ts +1 -0
- package/dist/server/instructions/ConditionInstruction.js +1 -0
- package/dist/server/instructions/CreateInstruction.js +5 -5
- package/dist/server/instructions/DestroyInstruction.js +3 -3
- package/dist/server/instructions/EndInstruction.d.ts +6 -0
- package/dist/server/instructions/EndInstruction.js +46 -0
- package/dist/server/instructions/QueryInstruction.js +2 -2
- package/dist/server/instructions/UpdateInstruction.js +3 -3
- package/dist/server/migrations/20240115220721-add-node-key-to-job.d.ts +6 -0
- package/dist/server/migrations/20240115220721-add-node-key-to-job.js +54 -0
- package/dist/server/triggers/CollectionTrigger.d.ts +2 -0
- package/dist/server/triggers/CollectionTrigger.js +28 -7
- package/dist/server/triggers/ScheduleTrigger/DateFieldScheduleTrigger.d.ts +31 -0
- package/dist/server/triggers/ScheduleTrigger/DateFieldScheduleTrigger.js +334 -0
- package/dist/server/triggers/ScheduleTrigger/StaticScheduleTrigger.d.ts +15 -0
- package/dist/server/triggers/ScheduleTrigger/StaticScheduleTrigger.js +143 -0
- package/dist/server/triggers/ScheduleTrigger/index.d.ts +13 -0
- package/dist/server/triggers/ScheduleTrigger/index.js +74 -0
- package/dist/server/triggers/ScheduleTrigger/utils.d.ts +5 -0
- package/dist/server/triggers/ScheduleTrigger/utils.js +35 -0
- package/dist/server/triggers/index.d.ts +2 -0
- package/dist/server/triggers/index.js +4 -0
- package/dist/server/types/Workflow.d.ts +5 -2
- package/package.json +10 -5
- package/dist/server/triggers/ScheduleTrigger.d.ts +0 -41
- package/dist/server/triggers/ScheduleTrigger.js +0 -480
|
@@ -86,13 +86,17 @@ async function handler(workflow, data, options) {
|
|
|
86
86
|
});
|
|
87
87
|
}
|
|
88
88
|
const json = (0, import_utils.toJSON)(result);
|
|
89
|
-
|
|
90
|
-
workflow
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
89
|
+
if (workflow.sync) {
|
|
90
|
+
await this.workflow.trigger(
|
|
91
|
+
workflow,
|
|
92
|
+
{ data: json, stack: context == null ? void 0 : context.stack },
|
|
93
|
+
{
|
|
94
|
+
transaction
|
|
95
|
+
}
|
|
96
|
+
);
|
|
97
|
+
} else {
|
|
98
|
+
this.workflow.trigger(workflow, { data: json, stack: context == null ? void 0 : context.stack });
|
|
99
|
+
}
|
|
96
100
|
}
|
|
97
101
|
class CollectionTrigger extends import__.default {
|
|
98
102
|
events = /* @__PURE__ */ new Map();
|
|
@@ -140,4 +144,21 @@ class CollectionTrigger extends import__.default {
|
|
|
140
144
|
}
|
|
141
145
|
}
|
|
142
146
|
}
|
|
147
|
+
async validateEvent(workflow, context, options) {
|
|
148
|
+
if (context.stack) {
|
|
149
|
+
const existed = await workflow.countExecutions({
|
|
150
|
+
where: {
|
|
151
|
+
id: context.stack
|
|
152
|
+
},
|
|
153
|
+
transaction: options.transaction
|
|
154
|
+
});
|
|
155
|
+
if (existed) {
|
|
156
|
+
this.workflow.getLogger(workflow.id).warn(
|
|
157
|
+
`workflow ${workflow.id} has already been triggered in stack executions (${context.stack}), and newly triggering will be skipped.`
|
|
158
|
+
);
|
|
159
|
+
return false;
|
|
160
|
+
}
|
|
161
|
+
}
|
|
162
|
+
return true;
|
|
163
|
+
}
|
|
143
164
|
}
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import { Transactionable } from '@nocobase/database';
import type Plugin from '../../Plugin';
import type { WorkflowModel } from '../../types';
/** References a date field on the scheduled collection, optionally shifted by an offset. */
export type ScheduleOnField = {
  /** name of the date field on the record */
  field: string;
  /** how many `unit`s to shift the field's value by */
  offset?: number;
  /** offset multiplier in milliseconds: second (1000), minute, hour or day */
  unit?: 1000 | 60000 | 3600000 | 86400000;
};
/** Configuration shape of a schedule-typed workflow. */
export interface ScheduleTriggerConfig {
  /** scheduling mode discriminator (static vs. date-field) */
  mode: number;
  /** repeat rule: cron expression (string), interval in ms (number), or none */
  repeat?: string | number | null;
  /** maximum number of executions; unlimited when absent */
  limit?: number;
  /** when the schedule becomes active, derived from a record's date field */
  startsOn?: ScheduleOnField;
  /** when the schedule stops: an absolute date string or a record date field */
  endsOn?: string | ScheduleOnField;
}
/**
 * Schedules workflow runs based on date-field values of collection records.
 * Keeps a rolling cache of upcoming runs and refreshes it periodically.
 */
export default class ScheduleTrigger {
  workflow: Plugin;
  /** registered db-hook listeners, keyed by hook id */
  events: Map<any, any>;
  // interval handle driving the periodic cache reload
  private timer;
  // pending setTimeout handles for upcoming runs
  private cache;
  /** cache window length in ms (reload period) */
  cacheCycle: number;
  constructor(workflow: Plugin);
  /** Re-query enabled date-field schedule workflows and rebuild the timer cache. */
  reload(): Promise<void>;
  /** Load candidate records for each workflow and schedule their next runs. */
  inspect(workflows: WorkflowModel[]): void;
  /** Query records whose configured start/repeat/end windows intersect the cache window. */
  loadRecordsToSchedule({ config: { collection, limit, startsOn, repeat, endsOn }, allExecuted }: WorkflowModel, currentDate: Date): Promise<import("@nocobase/database").Model<any, any>[]>;
  /** Next run timestamp (ms) for one record, or null when out of range/finished. */
  getRecordNextTime(workflow: WorkflowModel, record: any, nextSecond?: boolean): any;
  /** Register (toggle=true) or clear (toggle=false) the timer for a record's run. */
  schedule(workflow: WorkflowModel, record: any, nextTime: any, toggle?: boolean, options?: {}): Promise<void>;
  /** Fire the workflow for a record at the given time, then schedule the next repeat. */
  trigger(workflow: WorkflowModel, record: any, nextTime: any, { transaction }?: Transactionable): Promise<void>;
  on(workflow: WorkflowModel): void;
  off(workflow: WorkflowModel): void;
}
|
|
@@ -0,0 +1,334 @@
|
|
|
1
|
+
var __create = Object.create;
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
6
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
7
|
+
var __export = (target, all) => {
|
|
8
|
+
for (var name in all)
|
|
9
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
10
|
+
};
|
|
11
|
+
var __copyProps = (to, from, except, desc) => {
|
|
12
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
13
|
+
for (let key of __getOwnPropNames(from))
|
|
14
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
15
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
16
|
+
}
|
|
17
|
+
return to;
|
|
18
|
+
};
|
|
19
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
20
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
21
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
22
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
23
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
24
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
25
|
+
mod
|
|
26
|
+
));
|
|
27
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
28
|
+
var DateFieldScheduleTrigger_exports = {};
|
|
29
|
+
__export(DateFieldScheduleTrigger_exports, {
|
|
30
|
+
default: () => ScheduleTrigger
|
|
31
|
+
});
|
|
32
|
+
module.exports = __toCommonJS(DateFieldScheduleTrigger_exports);
|
|
33
|
+
var import_database = require("@nocobase/database");
|
|
34
|
+
var import_cron_parser = __toESM(require("cron-parser"));
|
|
35
|
+
var import_utils = require("./utils");
|
|
36
|
+
// Resolve the reference timestamp for an on-field config relative to `now`:
// shifts `now` backwards by offset * unit milliseconds. Returns null when no
// field is configured.
function getOnTimestampWithOffset(on, now) {
  const { field, offset = 0, unit = 1e3 } = on;
  return field ? now.getTime() - offset * unit : null;
}
|
|
43
|
+
// Convert an "on" option into a timestamp for a given record:
// - string: parsed as an absolute date (milliseconds stripped)
// - object: the record's date field value (ms stripped) shifted by
//   offset * unit * dir (dir = 1 forwards, -1 backwards)
// - missing option or missing field value: null
function getDataOptionTime(record, on, dir = 1) {
  if (!on) {
    return null;
  }
  if (typeof on === "string") {
    const time = (0, import_utils.parseDateWithoutMs)(on);
    return time ? time : null;
  }
  if (typeof on === "object") {
    const { field, offset = 0, unit = 1e3 } = on;
    const value = record.get(field);
    if (!value) {
      return null;
    }
    const second = new Date(value.getTime());
    second.setMilliseconds(0);
    return second.getTime() + offset * unit * dir;
  }
  return null;
}
|
|
65
|
+
// Per-dialect SQL snippet producing an integer Unix timestamp (seconds) for a
// date/time column; used to evaluate repeat intervals inside the database.
const DialectTimestampFnMap = {
  postgres: (col) => `CAST(FLOOR(extract(epoch from "${col}")) AS INTEGER)`,
  mysql: (col) => `CAST(FLOOR(UNIX_TIMESTAMP(\`${col}\`)) AS SIGNED INTEGER)`,
  sqlite: (col) => `CAST(FLOOR(unixepoch(${col})) AS INTEGER)`,
};
// MariaDB shares MySQL's UNIX_TIMESTAMP function.
DialectTimestampFnMap["mariadb"] = DialectTimestampFnMap.mysql;
|
|
77
|
+
// Next scheduled timestamp (ms) for a cron expression, strictly after currentDate.
function getCronNextTime(cron, currentDate) {
  return import_cron_parser.default.parseExpression(cron, { currentDate }).next().getTime();
}
// Whether the cron expression fires within `range` ms after currentDate.
function matchCronNextTime(cron, currentDate, range) {
  return getCronNextTime(cron, currentDate) - currentDate.getTime() <= range;
}
|
|
85
|
+
// Unique key for a db-hook listener registered for a workflow + event type.
function getHookId(workflow, type) {
  return [type, workflow.id].join("#");
}
|
|
88
|
+
/**
 * Date-field based schedule trigger: scans collection records whose configured
 * date fields fall within a rolling cache window (2 reload cycles), and fires
 * the workflow at each record's computed run time.
 *
 * Fix: `off()` previously derived the hook id from `${collection}.afterSave`
 * while `on()` registered under `${collection}.afterSaveWithAssociations`,
 * so disabling a workflow never unregistered its db listener (leak + stale
 * scheduling). `off()` now uses the same event name as `on()`.
 */
class ScheduleTrigger {
  // active db-hook listeners, keyed by getHookId(workflow, event)
  events = /* @__PURE__ */ new Map();
  // interval handle driving the periodic cache reload
  timer = null;
  // pending setTimeout handles, keyed by `${workflowId}:${recordPk}@${time}`
  cache = /* @__PURE__ */ new Map();
  // caching workflows in range, default to 5min
  cacheCycle = 3e5;
  constructor(workflow) {
    this.workflow = workflow;
    workflow.app.on("afterStart", async () => {
      if (this.timer) {
        return;
      }
      this.timer = setInterval(() => this.reload(), this.cacheCycle);
      this.reload();
    });
    workflow.app.on("beforeStop", () => {
      if (this.timer) {
        clearInterval(this.timer);
      }
      for (const [key, timer] of this.cache.entries()) {
        clearTimeout(timer);
        this.cache.delete(key);
      }
    });
  }
  /**
   * Re-query enabled date-field schedule workflows and rebuild the timer cache.
   * NOTE(review): the old cache map is replaced without clearing its pending
   * timeouts; duplicate firings appear to be deduplicated downstream by
   * validateEvent (context.date) — confirm before changing.
   */
  async reload() {
    const WorkflowRepo = this.workflow.app.db.getRepository("workflows");
    const workflows = await WorkflowRepo.find({
      filter: { enabled: true, type: "schedule", "config.mode": import_utils.SCHEDULE_MODE.DATE_FIELD }
    });
    this.cache = /* @__PURE__ */ new Map();
    this.inspect(workflows);
  }
  /** Load candidate records for each workflow and schedule their next runs. */
  inspect(workflows) {
    const now = /* @__PURE__ */ new Date();
    workflows.forEach(async (workflow) => {
      const records = await this.loadRecordsToSchedule(workflow, now);
      records.forEach((record) => {
        const nextTime = this.getRecordNextTime(workflow, record);
        this.schedule(workflow, record, nextTime, Boolean(nextTime));
      });
    });
  }
  // Record selection rules:
  // 1. startsOn in range -> yes
  // 2. startsOn before now, has no repeat -> no
  // 3. startsOn before now, and has repeat:
  //    a. repeat out of range -> no
  //    b. repeat in range (number or cron):
  //       i. endsOn after now -> yes
  //       ii. endsOn before now -> no
  async loadRecordsToSchedule({ config: { collection, limit, startsOn, repeat, endsOn }, allExecuted }, currentDate) {
    const { db } = this.workflow.app;
    // execution quota exhausted -> nothing to schedule
    if (limit && allExecuted >= limit) {
      return [];
    }
    if (!startsOn) {
      return [];
    }
    const timestamp = currentDate.getTime();
    const startTimestamp = getOnTimestampWithOffset(startsOn, currentDate);
    if (!startTimestamp) {
      return [];
    }
    const range = this.cacheCycle * 2;
    const conditions = [
      {
        [startsOn.field]: {
          // cache next 2 cycles
          [import_database.Op.lt]: new Date(startTimestamp + range)
        }
      }
    ];
    if (repeat) {
      if (typeof repeat === "number") {
        const tsFn = DialectTimestampFnMap[db.options.dialect];
        // for long intervals, filter by (now - start) % repeat in SQL so only
        // records whose next occurrence falls inside the window are loaded
        if (repeat > range && tsFn) {
          const modExp = (0, import_database.fn)(
            "MOD",
            (0, import_database.literal)(`${Math.round(timestamp / 1e3)} - ${tsFn(startsOn.field)}`),
            Math.round(repeat / 1e3)
          );
          conditions.push((0, import_database.where)(modExp, { [import_database.Op.lt]: Math.round(range / 1e3) }));
        }
      } else if (typeof repeat === "string") {
        // cron: skip loading entirely if the next firing is beyond the window
        if (!matchCronNextTime(repeat, currentDate, range)) {
          return [];
        }
      }
      if (endsOn) {
        const now = /* @__PURE__ */ new Date();
        // NOTE(review): when endsOn is a string, getOnTimestampWithOffset
        // destructures it and finds no `field`, returning null — so a string
        // endsOn always yields [] here and the string branch below is dead.
        // Confirm whether string endsOn should be parsed as a date instead.
        const endTimestamp = getOnTimestampWithOffset(endsOn, now);
        if (!endTimestamp) {
          return [];
        }
        if (typeof endsOn === "string") {
          if (endTimestamp <= timestamp) {
            return [];
          }
        } else {
          conditions.push({
            [endsOn.field]: {
              [import_database.Op.gte]: new Date(endTimestamp)
            }
          });
        }
      }
    } else {
      // no repeat: only records whose start time is still in the future
      conditions.push({
        [startsOn.field]: {
          [import_database.Op.gte]: new Date(startTimestamp)
        }
      });
    }
    const { model } = db.getCollection(collection);
    return model.findAll({
      where: {
        [import_database.Op.and]: conditions
      }
    });
  }
  /**
   * Next run timestamp (ms) for one record, or null when finished or outside
   * the cache window. `nextSecond` pushes the base time past "now" so a run
   * that just fired is not rescheduled for the same second.
   */
  getRecordNextTime(workflow, record, nextSecond = false) {
    const {
      config: { startsOn, endsOn, repeat, limit },
      allExecuted
    } = workflow;
    if (limit && allExecuted >= limit) {
      return null;
    }
    const range = this.cacheCycle;
    const now = /* @__PURE__ */ new Date();
    now.setMilliseconds(nextSecond ? 1e3 : 0);
    const timestamp = now.getTime();
    const startTime = getDataOptionTime(record, startsOn);
    const endTime = getDataOptionTime(record, endsOn);
    let nextTime = null;
    if (!startTime) {
      return null;
    }
    if (startTime > timestamp + range) {
      return null;
    }
    if (startTime >= timestamp) {
      // first run is still ahead: valid unless it ends before it starts
      return !endTime || endTime >= startTime && endTime < timestamp + range ? startTime : null;
    } else {
      if (!repeat) {
        return null;
      }
    }
    if (typeof repeat === "number") {
      // next occurrence on the period grid strictly after `timestamp`
      // ((startTime - timestamp) is negative; JS % keeps its sign)
      const nextRepeatTime = (startTime - timestamp) % repeat + repeat;
      if (nextRepeatTime > range) {
        return null;
      }
      if (endTime && endTime < timestamp + nextRepeatTime) {
        return null;
      }
      nextTime = timestamp + nextRepeatTime;
    } else if (typeof repeat === "string") {
      nextTime = getCronNextTime(repeat, now);
      if (nextTime - timestamp > range) {
        return null;
      }
      if (endTime && endTime < nextTime) {
        return null;
      }
    }
    if (endTime && endTime <= timestamp) {
      return null;
    }
    return nextTime;
  }
  /**
   * Register (toggle=true) or clear (toggle=false) the timer for a record's
   * run. A run whose time has already arrived triggers immediately.
   */
  schedule(workflow, record, nextTime, toggle = true, options = {}) {
    const { model } = this.workflow.app.db.getCollection(workflow.config.collection);
    const recordPk = record.get(model.primaryKeyAttribute);
    if (toggle) {
      const nextInterval = Math.max(0, nextTime - Date.now());
      const key = `${workflow.id}:${recordPk}@${nextTime}`;
      if (!this.cache.has(key)) {
        if (nextInterval) {
          this.cache.set(key, setTimeout(this.trigger.bind(this, workflow, record, nextTime), nextInterval));
        } else {
          return this.trigger(workflow, record, nextTime, options);
        }
      }
    } else {
      // clear every pending run for this workflow + record
      for (const [key, timer] of this.cache.entries()) {
        if (key.startsWith(`${workflow.id}:${recordPk}@`)) {
          clearTimeout(timer);
          this.cache.delete(key);
        }
      }
    }
  }
  /**
   * Fire the workflow for a record at the given time (re-fetching the record
   * with configured appends), then schedule the next repeat if any remain.
   */
  async trigger(workflow, record, nextTime, { transaction } = {}) {
    const { repository, model } = this.workflow.app.db.getCollection(workflow.config.collection);
    const recordPk = record.get(model.primaryKeyAttribute);
    const data = await repository.findOne({
      filterByTk: recordPk,
      appends: workflow.config.appends,
      transaction
    });
    const key = `${workflow.id}:${recordPk}@${nextTime}`;
    this.cache.delete(key);
    this.workflow.trigger(workflow, {
      data: data.toJSON(),
      date: new Date(nextTime)
    });
    // stop when not repeating or when this run consumed the last quota slot
    if (!workflow.config.repeat || workflow.config.limit && workflow.allExecuted >= workflow.config.limit - 1) {
      return;
    }
    const n = this.getRecordNextTime(workflow, data, true);
    if (n) {
      this.schedule(workflow, data, n, true);
    }
  }
  /** Schedule existing records and start watching the collection for changes. */
  on(workflow) {
    this.inspect([workflow]);
    const { collection } = workflow.config;
    const event = `${collection}.afterSaveWithAssociations`;
    const name = getHookId(workflow, event);
    if (this.events.has(name)) {
      return;
    }
    const listener = async (data, { transaction }) => {
      const nextTime = this.getRecordNextTime(workflow, data);
      return this.schedule(workflow, data, nextTime, Boolean(nextTime), { transaction });
    };
    this.events.set(name, listener);
    this.workflow.app.db.on(event, listener);
  }
  /** Clear this workflow's pending timers and unregister its db listener. */
  off(workflow) {
    for (const [key, timer] of this.cache.entries()) {
      if (key.startsWith(`${workflow.id}:`)) {
        clearTimeout(timer);
        this.cache.delete(key);
      }
    }
    const { collection } = workflow.config;
    // FIX: must match the event name used in `on()` — it was
    // `${collection}.afterSave`, which never matched the registered hook id,
    // so the listener was never removed when the workflow was disabled.
    const event = `${collection}.afterSaveWithAssociations`;
    const name = getHookId(workflow, event);
    if (this.events.has(name)) {
      const listener = this.events.get(name);
      this.events.delete(name);
      this.workflow.app.db.off(event, listener);
    }
  }
}
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import type Plugin from '../../Plugin';
/**
 * Schedules workflow runs at fixed, statically configured times
 * (start date + optional repeat rule), independent of any collection data.
 */
export default class StaticScheduleTrigger {
  workflow: Plugin;
  // pending setTimeout handles keyed by `${workflowId}@${timestamp}`
  private timers;
  constructor(workflow: Plugin);
  /** Compute and register the next run for each given workflow. */
  inspect(workflows: any): void;
  /** Next timestamp (ms) the workflow should run at, or null when finished/out of range. */
  getNextTime({ config, allExecuted }: {
    config: any;
    allExecuted: any;
  }, currentDate: any, nextSecond?: boolean): any;
  /** Register (toggle=true) or clear (toggle=false) the timer for a run time. */
  schedule(workflow: any, nextTime: any, toggle?: boolean): void;
  /** Fire the workflow for the given time, then schedule the next repeat. */
  trigger(workflow: any, time: any): Promise<void>;
  on(workflow: any): void;
  off(workflow: any): void;
}
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
var __create = Object.create;
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
6
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
7
|
+
var __export = (target, all) => {
|
|
8
|
+
for (var name in all)
|
|
9
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
10
|
+
};
|
|
11
|
+
var __copyProps = (to, from, except, desc) => {
|
|
12
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
13
|
+
for (let key of __getOwnPropNames(from))
|
|
14
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
15
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
16
|
+
}
|
|
17
|
+
return to;
|
|
18
|
+
};
|
|
19
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
20
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
21
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
22
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
23
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
24
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
25
|
+
mod
|
|
26
|
+
));
|
|
27
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
28
|
+
var StaticScheduleTrigger_exports = {};
|
|
29
|
+
__export(StaticScheduleTrigger_exports, {
|
|
30
|
+
default: () => StaticScheduleTrigger
|
|
31
|
+
});
|
|
32
|
+
module.exports = __toCommonJS(StaticScheduleTrigger_exports);
|
|
33
|
+
var import_cron_parser = __toESM(require("cron-parser"));
|
|
34
|
+
var import_utils = require("./utils");
|
|
35
|
+
const MAX_SAFE_INTERVAL = 2147483647;
|
|
36
|
+
/**
 * Schedules workflow runs at statically configured times (config.startsOn +
 * optional repeat rule), independent of any collection data. Timers longer
 * than the 32-bit setTimeout limit are chained in MAX_SAFE_INTERVAL hops.
 */
class StaticScheduleTrigger {
  constructor(workflow) {
    this.workflow = workflow;
    // load and schedule all enabled static-mode workflows once the app is up
    workflow.app.on("afterStart", async () => {
      const WorkflowRepo = this.workflow.app.db.getRepository("workflows");
      const workflows = await WorkflowRepo.find({
        filter: { enabled: true, type: "schedule", "config.mode": import_utils.SCHEDULE_MODE.STATIC }
      });
      this.inspect(workflows);
    });
    // clear all pending timers on shutdown
    workflow.app.on("beforeStop", () => {
      for (const timer of this.timers.values()) {
        clearInterval(timer);
      }
    });
  }
  // pending setTimeout handles keyed by `${workflowId}@${timestamp}`
  timers = /* @__PURE__ */ new Map();
  /** Compute and register the next run for each given workflow. */
  inspect(workflows) {
    const now = /* @__PURE__ */ new Date();
    now.setMilliseconds(0);
    workflows.forEach((workflow) => {
      const nextTime = this.getNextTime(workflow, now);
      if (nextTime) {
        this.workflow.getLogger(workflow.id).info(`caching scheduled workflow will run at: ${new Date(nextTime).toISOString()}`);
      } else {
        this.workflow.getLogger(workflow.id).info("workflow will not be scheduled");
      }
      // nextTime === null compares false, which unschedules the workflow
      this.schedule(workflow, nextTime, nextTime >= now.getTime());
    });
  }
  /**
   * Next timestamp (ms) the workflow should run at, or null when finished or
   * out of range. NOTE: mutates `currentDate` (strips/advances milliseconds).
   */
  getNextTime({ config, allExecuted }, currentDate, nextSecond = false) {
    if (config.limit && allExecuted >= config.limit) {
      return null;
    }
    if (!config.startsOn) {
      return null;
    }
    currentDate.setMilliseconds(nextSecond ? 1e3 : 0);
    const timestamp = currentDate.getTime();
    const startTime = (0, import_utils.parseDateWithoutMs)(config.startsOn);
    if (startTime > timestamp) {
      return startTime;
    }
    if (config.repeat) {
      const endTime = config.endsOn ? (0, import_utils.parseDateWithoutMs)(config.endsOn) : null;
      if (endTime && endTime < timestamp) {
        return null;
      }
      if (typeof config.repeat === "string") {
        // cron expression: next firing strictly after currentDate
        const interval = import_cron_parser.default.parseExpression(config.repeat, { currentDate });
        const next = interval.next();
        return next.getTime();
      } else if (typeof config.repeat === "number") {
        // NOTE(review): this yields now + (elapsed % repeat), which is not on
        // the period grid anchored at startTime (the date-field trigger uses
        // the opposite sign). Confirm intended formula before relying on it.
        return timestamp + (timestamp - startTime) % config.repeat;
      } else {
        return null;
      }
    } else {
      // one-shot: only valid at exactly the (second-truncated) start time
      if (startTime < timestamp) {
        return null;
      }
      return timestamp;
    }
  }
  /** Register (toggle=true) or clear (toggle=false) the timer for a run time. */
  schedule(workflow, nextTime, toggle = true) {
    if (toggle) {
      const key = `${workflow.id}@${nextTime}`;
      if (!this.timers.has(key)) {
        const interval = Math.max(nextTime - Date.now(), 0);
        if (interval > MAX_SAFE_INTERVAL) {
          // setTimeout overflows beyond 2^31-1 ms; hop forward and re-schedule
          this.timers.set(
            key,
            setTimeout(() => {
              this.timers.delete(key);
              this.schedule(workflow, nextTime);
            }, MAX_SAFE_INTERVAL)
          );
        } else {
          this.timers.set(key, setTimeout(this.trigger.bind(this, workflow, nextTime), interval));
        }
      }
    } else {
      // clear every pending run for this workflow
      for (const [key, timer] of this.timers.entries()) {
        if (key.startsWith(`${workflow.id}@`)) {
          clearTimeout(timer);
          this.timers.delete(key);
        }
      }
    }
  }
  /** Fire the workflow for the given time, then schedule the next repeat. */
  async trigger(workflow, time) {
    this.timers.delete(`${workflow.id}@${time}`);
    this.workflow.trigger(workflow, { date: new Date(time) });
    // stop when not repeating or when this run consumed the last quota slot
    if (!workflow.config.repeat || workflow.config.limit && workflow.allExecuted >= workflow.config.limit - 1) {
      return;
    }
    const nextTime = this.getNextTime(workflow, /* @__PURE__ */ new Date(), true);
    if (nextTime) {
      this.schedule(workflow, nextTime);
    }
  }
  on(workflow) {
    this.inspect([workflow]);
  }
  off(workflow) {
    this.schedule(workflow, null, false);
  }
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import { Transactionable } from 'sequelize';
import Trigger from '..';
import type Plugin from '../../Plugin';
import { WorkflowModel } from '../../types';
/**
 * Facade trigger for "schedule" workflows: dispatches `on`/`off` to a concrete
 * implementation (static or date-field based) selected by `config.mode`.
 */
export default class ScheduleTrigger extends Trigger {
  /** schedule triggering is always asynchronous */
  sync: boolean;
  /** mode value -> concrete trigger implementation */
  private modes;
  constructor(workflow: Plugin);
  private getTrigger;
  on(workflow: any): void;
  off(workflow: any): void;
  /** Rejects events without a date; deduplicates runs by `context.date`. */
  validateEvent(workflow: WorkflowModel, context: any, options: Transactionable): Promise<boolean>;
}
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
var __create = Object.create;
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
6
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
7
|
+
var __export = (target, all) => {
|
|
8
|
+
for (var name in all)
|
|
9
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
10
|
+
};
|
|
11
|
+
var __copyProps = (to, from, except, desc) => {
|
|
12
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
13
|
+
for (let key of __getOwnPropNames(from))
|
|
14
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
15
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
16
|
+
}
|
|
17
|
+
return to;
|
|
18
|
+
};
|
|
19
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
20
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
21
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
22
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
23
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
24
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
25
|
+
mod
|
|
26
|
+
));
|
|
27
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
28
|
+
var ScheduleTrigger_exports = {};
|
|
29
|
+
__export(ScheduleTrigger_exports, {
|
|
30
|
+
default: () => ScheduleTrigger
|
|
31
|
+
});
|
|
32
|
+
module.exports = __toCommonJS(ScheduleTrigger_exports);
|
|
33
|
+
var import__ = __toESM(require(".."));
|
|
34
|
+
var import_DateFieldScheduleTrigger = __toESM(require("./DateFieldScheduleTrigger"));
|
|
35
|
+
var import_StaticScheduleTrigger = __toESM(require("./StaticScheduleTrigger"));
|
|
36
|
+
var import_utils = require("./utils");
|
|
37
|
+
// Facade for "schedule" workflows: dispatches lifecycle calls to the concrete
// implementation (static or date-field based) selected by config.mode.
class ScheduleTrigger extends import__.default {
  // schedule triggering is always asynchronous
  sync = false;
  // mode value -> concrete trigger implementation
  modes = /* @__PURE__ */ new Map();
  constructor(workflow) {
    super(workflow);
    this.modes.set(import_utils.SCHEDULE_MODE.STATIC, new import_StaticScheduleTrigger.default(workflow));
    this.modes.set(import_utils.SCHEDULE_MODE.DATE_FIELD, new import_DateFieldScheduleTrigger.default(workflow));
  }
  getTrigger(mode) {
    return this.modes.get(mode);
  }
  on(workflow) {
    const trigger = this.getTrigger(workflow.config.mode);
    if (trigger) {
      trigger.on(workflow);
    }
  }
  off(workflow) {
    const trigger = this.getTrigger(workflow.config.mode);
    if (trigger) {
      trigger.off(workflow);
    }
  }
  // An event is valid only when it carries a date and no execution already
  // exists for that same date (deduplication across reloads/restarts).
  async validateEvent(workflow, context, options) {
    if (!context.date) {
      return false;
    }
    const existed = await workflow.countExecutions({
      where: {
        "context.date": context.date
      },
      transaction: options.transaction
    });
    return !existed;
  }
}
|