@nocobase/plugin-workflow 2.0.0-alpha.4 → 2.0.0-alpha.41

This diff reflects the contents of publicly released package versions as they appear in their public registries and is provided for informational purposes only.
@@ -98,9 +98,6 @@ export default class extends Instruction {
  branching: ({ rejectOnFalse }?: {
  rejectOnFalse?: boolean;
  }) => false | ({
- label: string;
- value: boolean;
- } | {
  label: string;
  value: 1;
  } | {
@@ -140,9 +140,9 @@ export declare const executionSchema: {
  unique?: undefined;
  onDelete?: undefined;
  } | {
- interface: string;
  type: string;
  name: string;
+ interface: string;
  uiSchema: {
  type: string;
  title: string;
@@ -129,9 +129,9 @@ declare const _default: {
  unique?: undefined;
  onDelete?: undefined;
  } | {
- interface: string;
  type: string;
  name: string;
+ interface: string;
  uiSchema: {
  type: string;
  title: string;
@@ -115,9 +115,9 @@ var executions_default = {
  name: "output"
  },
  {
- interface: "createdAt",
  type: "datetime",
  name: "createdAt",
+ interface: "createdAt",
  uiSchema: {
  type: "datetime",
  title: `{{t("Triggered at", { ns: "${import_constants.NAMESPACE}" })}}`,
@@ -125,6 +125,18 @@ var executions_default = {
  "x-component-props": {},
  "x-read-pretty": true
  }
+ },
+ {
+ type: "boolean",
+ name: "manually",
+ interface: "checkbox",
+ uiSchema: {
+ type: "boolean",
+ title: `{{t("Triggered manually", { ns: "${import_constants.NAMESPACE}" })}}`,
+ "x-component": "Checkbox",
+ "x-component-props": {},
+ "x-read-pretty": true
+ }
  }
  ],
  indexes: [{ fields: ["dispatched", "id"] }]
@@ -11,8 +11,8 @@ module.exports = {
  "react": "18.2.0",
  "@formily/core": "2.3.7",
  "@formily/react": "2.3.7",
- "@nocobase/client": "2.0.0-alpha.4",
- "@nocobase/utils": "2.0.0-alpha.4",
+ "@nocobase/client": "2.0.0-alpha.41",
+ "@nocobase/utils": "2.0.0-alpha.41",
  "antd": "5.24.2",
  "@ant-design/icons": "5.6.1",
  "react-router-dom": "6.30.1",
@@ -20,17 +20,17 @@ module.exports = {
  "lodash": "4.17.21",
  "@dnd-kit/core": "6.1.0",
  "@formily/shared": "2.3.7",
- "@nocobase/plugin-mobile": "2.0.0-alpha.4",
+ "@nocobase/plugin-mobile": "2.0.0-alpha.41",
  "sequelize": "6.35.2",
- "@nocobase/database": "2.0.0-alpha.4",
- "@nocobase/server": "2.0.0-alpha.4",
- "@nocobase/data-source-manager": "2.0.0-alpha.4",
- "@nocobase/logger": "2.0.0-alpha.4",
- "@nocobase/evaluators": "2.0.0-alpha.4",
+ "@nocobase/database": "2.0.0-alpha.41",
+ "@nocobase/server": "2.0.0-alpha.41",
+ "@nocobase/data-source-manager": "2.0.0-alpha.41",
+ "@nocobase/logger": "2.0.0-alpha.41",
+ "@nocobase/evaluators": "2.0.0-alpha.41",
  "@formily/antd-v5": "1.2.3",
  "@formily/reactive": "2.3.7",
- "@nocobase/actions": "2.0.0-alpha.4",
+ "@nocobase/actions": "2.0.0-alpha.41",
  "dayjs": "1.11.13",
- "@nocobase/plugin-workflow-test": "2.0.0-alpha.4",
- "@nocobase/test": "2.0.0-alpha.4"
+ "@nocobase/plugin-workflow-test": "2.0.0-alpha.41",
+ "@nocobase/test": "2.0.0-alpha.41"
  };
@@ -179,16 +179,19 @@
  "Calculation result": "计算结果",
  "True": "真",
  "False": "假",
- "concat": "连接",
+ "Concat": "连接",
  "Condition": "条件判断",
  "Based on boolean result of the calculation to determine whether to \"continue\" or \"exit\" the process, or continue on different branches of \"yes\" and \"no\".":
  "基于计算结果的真假来决定“继续”或“退出”流程,或者在“是”与“否”的分支上分别继续。",
  "Mode": "模式",
+ "Yes": "是",
+ "No": "否",
  "Continue when \"Yes\"": "“是”则继续",
  "Branch into \"Yes\" and \"No\"": "“是”和“否”分别继续",
  "Condition expression": "条件表达式",
- "Inside of \"Yes\" branch": "“是”分支内",
- "Inside of \"No\" branch": "“否”分支内",
+ "Inside of \"{{branchName}}\" branch": "“{{branchName}}”分支内",
+ "\"{{branchName}}\" branch": "“{{branchName}}”分支",
+ "Branch {{index}}": "分支 {{index}}",
  "Create record": "新增数据",
  "Add new record to a collection. You can use variables from upstream nodes to assign values to fields.":
  "向一个数据表中添加新的数据。可以使用上游节点里的变量为字段赋值。",
@@ -235,7 +238,10 @@
  "Succeeded": "成功",
  "Test run": "测试执行",
  "Test run will do the actual data manipulating or API calling, please use with caution.": "测试执行会进行实际的数据操作或 API 调用,请谨慎使用。",
+ "Replace variables": "替换变量",
  "No variable": "无变量",
+ "Result": "结果",
+ "Log": "日志",

  "Add node": "添加节点",
  "Move all downstream nodes to": "将所有下游节点移至",
@@ -246,6 +252,11 @@
  "New version enabled": "已启用新版本",
  "Workflow is not exists": "工作流不存在",

+ "Delete node": "删除节点",
+ "Branch to keep": "保留分支",
+ "Delete all": "删除全部",
+ "Keep": "保留",
+
  "Select users": "选择用户",
  "Query users": "查询用户",
  "Add": "添加",
@@ -1 +1 @@
- {"name":"cron-parser","version":"4.4.0","description":"Node.js library for parsing crontab instructions","main":"lib/parser.js","types":"index.d.ts","typesVersions":{"<4.1":{"*":["types/ts3/*"]}},"directories":{"test":"test"},"scripts":{"test:tsd":"tsd","test:unit":"TZ=UTC tap ./test/*.js","test:cover":"TZ=UTC tap --coverage-report=html ./test/*.js","lint":"eslint .","lint:fix":"eslint --fix .","test":"npm run lint && npm run test:unit && npm run test:tsd"},"repository":{"type":"git","url":"https://github.com/harrisiirak/cron-parser.git"},"keywords":["cron","crontab","parser"],"author":"Harri Siirak","contributors":["Nicholas Clawson","Daniel Prentis <daniel@salsitasoft.com>","Renault John Lecoultre","Richard Astbury <richard.astbury@gmail.com>","Meaglin Wasabi <Meaglin.wasabi@gmail.com>","Mike Kusold <hello@mikekusold.com>","Alex Kit <alex.kit@atmajs.com>","Santiago Gimeno <santiago.gimeno@gmail.com>","Daniel <darc.tec@gmail.com>","Christian Steininger <christian.steininger.cs@gmail.com>","Mykola Piskovyi <m.piskovyi@gmail.com>","Brian Vaughn <brian.david.vaughn@gmail.com>","Nicholas Clawson <nickclaw@gmail.com>","Yasuhiroki <yasuhiroki.duck@gmail.com>","Nicholas Clawson <nickclaw@gmail.com>","Brendan Warkentin <faazshift@gmail.com>","Charlie Fish <fishcharlie.code@gmail.com>","Ian Graves <ian+diskimage@iangrav.es>","Andy Thompson <me@andytson.com>","Regev Brody <regevbr@gmail.com>"],"license":"MIT","dependencies":{"luxon":"^1.28.0"},"devDependencies":{"eslint":"^8.2.0","sinon":"^10.0.0","tap":"^16.0.1","tsd":"^0.19.0"},"engines":{"node":">=0.8"},"browser":{"fs":false},"tap":{"check-coverage":false},"tsd":{"directory":"test","compilerOptions":{"lib":["es2017","dom"]}},"_lastModified":"2025-10-05T10:00:23.201Z"}
+ {"name":"cron-parser","version":"4.4.0","description":"Node.js library for parsing crontab instructions","main":"lib/parser.js","types":"index.d.ts","typesVersions":{"<4.1":{"*":["types/ts3/*"]}},"directories":{"test":"test"},"scripts":{"test:tsd":"tsd","test:unit":"TZ=UTC tap ./test/*.js","test:cover":"TZ=UTC tap --coverage-report=html ./test/*.js","lint":"eslint .","lint:fix":"eslint --fix .","test":"npm run lint && npm run test:unit && npm run test:tsd"},"repository":{"type":"git","url":"https://github.com/harrisiirak/cron-parser.git"},"keywords":["cron","crontab","parser"],"author":"Harri Siirak","contributors":["Nicholas Clawson","Daniel Prentis <daniel@salsitasoft.com>","Renault John Lecoultre","Richard Astbury <richard.astbury@gmail.com>","Meaglin Wasabi <Meaglin.wasabi@gmail.com>","Mike Kusold <hello@mikekusold.com>","Alex Kit <alex.kit@atmajs.com>","Santiago Gimeno <santiago.gimeno@gmail.com>","Daniel <darc.tec@gmail.com>","Christian Steininger <christian.steininger.cs@gmail.com>","Mykola Piskovyi <m.piskovyi@gmail.com>","Brian Vaughn <brian.david.vaughn@gmail.com>","Nicholas Clawson <nickclaw@gmail.com>","Yasuhiroki <yasuhiroki.duck@gmail.com>","Nicholas Clawson <nickclaw@gmail.com>","Brendan Warkentin <faazshift@gmail.com>","Charlie Fish <fishcharlie.code@gmail.com>","Ian Graves <ian+diskimage@iangrav.es>","Andy Thompson <me@andytson.com>","Regev Brody <regevbr@gmail.com>"],"license":"MIT","dependencies":{"luxon":"^1.28.0"},"devDependencies":{"eslint":"^8.2.0","sinon":"^10.0.0","tap":"^16.0.1","tsd":"^0.19.0"},"engines":{"node":">=0.8"},"browser":{"fs":false},"tap":{"check-coverage":false},"tsd":{"directory":"test","compilerOptions":{"lib":["es2017","dom"]}},"_lastModified":"2025-11-19T01:23:32.490Z"}
@@ -1 +1 @@
- {"name":"lru-cache","description":"A cache object that deletes the least-recently-used items.","version":"8.0.5","author":"Isaac Z. Schlueter <i@izs.me>","keywords":["mru","lru","cache"],"sideEffects":false,"scripts":{"build":"npm run prepare","preprepare":"rm -rf dist","prepare":"tsc -p tsconfig.json && tsc -p tsconfig-esm.json","postprepare":"bash fixup.sh","pretest":"npm run prepare","presnap":"npm run prepare","test":"c8 tap","snap":"c8 tap","preversion":"npm test","postversion":"npm publish","prepublishOnly":"git push origin --follow-tags","format":"prettier --write .","typedoc":"typedoc --tsconfig tsconfig-esm.json ./src/*.ts","benchmark-results-typedoc":"bash scripts/benchmark-results-typedoc.sh","prebenchmark":"npm run prepare","benchmark":"make -C benchmark","preprofile":"npm run prepare","profile":"make -C benchmark profile"},"main":"./dist/cjs/index-cjs.js","module":"./dist/mjs/index.js","types":"./dist/mjs/index.d.ts","exports":{"./min":{"import":{"types":"./dist/mjs/index.d.ts","default":"./dist/mjs/index.min.js"},"require":{"types":"./dist/cjs/index.d.ts","default":"./dist/cjs/index.min.js"}},".":{"import":{"types":"./dist/mjs/index.d.ts","default":"./dist/mjs/index.js"},"require":{"types":"./dist/cjs/index.d.ts","default":"./dist/cjs/index-cjs.js"}}},"repository":"git://github.com/isaacs/node-lru-cache.git","devDependencies":{"@size-limit/preset-small-lib":"^7.0.8","@types/node":"^17.0.31","@types/tap":"^15.0.6","benchmark":"^2.1.4","c8":"^7.11.2","clock-mock":"^1.0.6","esbuild":"^0.17.11","eslint-config-prettier":"^8.5.0","marked":"^4.2.12","mkdirp":"^2.1.5","prettier":"^2.6.2","size-limit":"^7.0.8","tap":"^16.3.4","ts-node":"^10.7.0","tslib":"^2.4.0","typedoc":"^0.23.24","typescript":"^4.6.4"},"license":"ISC","files":["dist"],"engines":{"node":">=16.14"},"prettier":{"semi":false,"printWidth":70,"tabWidth":2,"useTabs":false,"singleQuote":true,"jsxSingleQuote":false,"bracketSameLine":true,"arrowParens":"avoid","endOfLine":"lf"},"tap":{"coverage":false,"node-arg":["--expose-gc","--no-warnings","--loader","ts-node/esm"],"ts":false},"size-limit":[{"path":"./dist/mjs/index.js"}],"_lastModified":"2025-10-05T10:00:22.839Z"}
+ {"name":"lru-cache","description":"A cache object that deletes the least-recently-used items.","version":"8.0.5","author":"Isaac Z. Schlueter <i@izs.me>","keywords":["mru","lru","cache"],"sideEffects":false,"scripts":{"build":"npm run prepare","preprepare":"rm -rf dist","prepare":"tsc -p tsconfig.json && tsc -p tsconfig-esm.json","postprepare":"bash fixup.sh","pretest":"npm run prepare","presnap":"npm run prepare","test":"c8 tap","snap":"c8 tap","preversion":"npm test","postversion":"npm publish","prepublishOnly":"git push origin --follow-tags","format":"prettier --write .","typedoc":"typedoc --tsconfig tsconfig-esm.json ./src/*.ts","benchmark-results-typedoc":"bash scripts/benchmark-results-typedoc.sh","prebenchmark":"npm run prepare","benchmark":"make -C benchmark","preprofile":"npm run prepare","profile":"make -C benchmark profile"},"main":"./dist/cjs/index-cjs.js","module":"./dist/mjs/index.js","types":"./dist/mjs/index.d.ts","exports":{"./min":{"import":{"types":"./dist/mjs/index.d.ts","default":"./dist/mjs/index.min.js"},"require":{"types":"./dist/cjs/index.d.ts","default":"./dist/cjs/index.min.js"}},".":{"import":{"types":"./dist/mjs/index.d.ts","default":"./dist/mjs/index.js"},"require":{"types":"./dist/cjs/index.d.ts","default":"./dist/cjs/index-cjs.js"}}},"repository":"git://github.com/isaacs/node-lru-cache.git","devDependencies":{"@size-limit/preset-small-lib":"^7.0.8","@types/node":"^17.0.31","@types/tap":"^15.0.6","benchmark":"^2.1.4","c8":"^7.11.2","clock-mock":"^1.0.6","esbuild":"^0.17.11","eslint-config-prettier":"^8.5.0","marked":"^4.2.12","mkdirp":"^2.1.5","prettier":"^2.6.2","size-limit":"^7.0.8","tap":"^16.3.4","ts-node":"^10.7.0","tslib":"^2.4.0","typedoc":"^0.23.24","typescript":"^4.6.4"},"license":"ISC","files":["dist"],"engines":{"node":">=16.14"},"prettier":{"semi":false,"printWidth":70,"tabWidth":2,"useTabs":false,"singleQuote":true,"jsxSingleQuote":false,"bracketSameLine":true,"arrowParens":"avoid","endOfLine":"lf"},"tap":{"coverage":false,"node-arg":["--expose-gc","--no-warnings","--loader","ts-node/esm"],"ts":false},"size-limit":[{"path":"./dist/mjs/index.js"}],"_lastModified":"2025-11-19T01:23:32.138Z"}
@@ -1 +1 @@
- {"name":"nodejs-snowflake","collaborators":["Utkarsh Srivastava <utkarsh@sagacious.dev>"],"description":"Generate time sortable 64 bits unique ids for distributed systems (inspired from twitter snowflake)","version":"2.0.1","license":"Apache 2.0","repository":{"type":"git","url":"https://github.com/utkarsh-pro/nodejs-snowflake.git"},"files":["nodejs_snowflake_bg.wasm","nodejs_snowflake.js","nodejs_snowflake.d.ts"],"main":"nodejs_snowflake.js","types":"nodejs_snowflake.d.ts","_lastModified":"2025-10-05T10:00:22.636Z"}
+ {"name":"nodejs-snowflake","collaborators":["Utkarsh Srivastava <utkarsh@sagacious.dev>"],"description":"Generate time sortable 64 bits unique ids for distributed systems (inspired from twitter snowflake)","version":"2.0.1","license":"Apache 2.0","repository":{"type":"git","url":"https://github.com/utkarsh-pro/nodejs-snowflake.git"},"files":["nodejs_snowflake_bg.wasm","nodejs_snowflake.js","nodejs_snowflake.d.ts"],"main":"nodejs_snowflake.js","types":"nodejs_snowflake.d.ts","_lastModified":"2025-11-19T01:23:31.948Z"}
@@ -14,7 +14,7 @@ import type PluginWorkflowServer from './Plugin';
  type Pending = {
  execution: ExecutionModel;
  job?: JobModel;
- force?: boolean;
+ loaded?: boolean;
  };
  export type EventOptions = {
  eventKey?: string;
@@ -26,7 +26,6 @@ export type EventOptions = {
  onTriggerFail?: Function;
  [key: string]: any;
  } & Transactionable;
- export declare const WORKER_JOB_WORKFLOW_PROCESS = "workflow:process";
  export default class Dispatcher {
  private readonly plugin;
  private ready;
@@ -38,7 +37,6 @@ export default class Dispatcher {
  constructor(plugin: PluginWorkflowServer);
  readonly onQueueExecution: QueueEventOptions['process'];
  setReady(ready: boolean): void;
- isReady(): boolean;
  getEventsCount(): number;
  trigger(workflow: WorkflowModel, context: object, options?: EventOptions): void | Promise<Processor | null>;
  resume(job: any): Promise<void>;
@@ -26,14 +26,13 @@ var __copyProps = (to, from, except, desc) => {
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
  var Dispatcher_exports = {};
  __export(Dispatcher_exports, {
- WORKER_JOB_WORKFLOW_PROCESS: () => WORKER_JOB_WORKFLOW_PROCESS,
  default: () => Dispatcher
  });
  module.exports = __toCommonJS(Dispatcher_exports);
  var import_crypto = require("crypto");
  var import_sequelize = require("sequelize");
  var import_constants = require("./constants");
- const WORKER_JOB_WORKFLOW_PROCESS = "workflow:process";
+ var import_Plugin = require("./Plugin");
  class Dispatcher {
  constructor(plugin) {
  this.plugin = plugin;
@@ -45,7 +44,7 @@ class Dispatcher {
  events = [];
  eventsCount = 0;
  get idle() {
- return !this.executing && !this.pending.length && !this.events.length;
+ return this.ready && !this.executing && !this.pending.length && !this.events.length;
  }
  onQueueExecution = async (event) => {
  const ExecutionRepo = this.plugin.db.getRepository("executions");
@@ -62,9 +61,6 @@ class Dispatcher {
  setReady(ready) {
  this.ready = ready;
  }
- isReady() {
- return this.ready;
- }
  getEventsCount() {
  return this.eventsCount;
  }
@@ -112,14 +108,14 @@ class Dispatcher {
  execution = await job.getExecution();
  }
  this.plugin.getLogger(execution.workflowId).info(`execution (${execution.id}) resuming from job (${job.id}) added to pending list`);
- this.run({ execution, job, force: true });
+ this.run({ execution, job, loaded: true });
  }
  async start(execution) {
  if (execution.status) {
  return;
  }
  this.plugin.getLogger(execution.workflowId).info(`starting deferred execution (${execution.id})`);
- this.run({ execution, force: true });
+ this.run({ execution, loaded: true });
  }
  async beforeStop() {
  this.ready = false;
@@ -135,10 +131,6 @@ class Dispatcher {
  this.plugin.getLogger("dispatcher").warn(`app is not ready, new dispatching will be ignored`);
  return;
  }
- if (!this.plugin.app.serving(WORKER_JOB_WORKFLOW_PROCESS)) {
- this.plugin.getLogger("dispatcher").warn(`${WORKER_JOB_WORKFLOW_PROCESS} is not serving, new dispatching will be ignored`);
- return;
- }
  if (this.executing) {
  this.plugin.getLogger("dispatcher").warn(`workflow executing is not finished, new dispatching will be ignored`);
  return;
@@ -151,25 +143,31 @@ class Dispatcher {
  let execution = null;
  if (this.pending.length) {
  const pending = this.pending.shift();
- execution = pending.force ? pending.execution : await this.acquirePendingExecution(pending.execution);
+ execution = pending.loaded ? pending.execution : await this.acquirePendingExecution(pending.execution);
  if (execution) {
  next = [execution, pending.job];
  this.plugin.getLogger(next[0].workflowId).info(`pending execution (${next[0].id}) ready to process`);
  }
  } else {
- execution = await this.acquireQueueingExecution();
- if (execution) {
- next = [execution];
+ if (this.plugin.serving()) {
+ execution = await this.acquireQueueingExecution();
+ if (execution) {
+ next = [execution];
+ }
+ } else {
+ this.plugin.getLogger("dispatcher").warn(`${import_Plugin.WORKER_JOB_WORKFLOW_PROCESS} is not serving on this instance, new dispatching will be ignored`);
  }
  }
  if (next) {
  await this.process(...next);
  }
- this.executing = null;
- if (next || this.pending.length) {
- this.plugin.getLogger("dispatcher").debug(`last process finished, will do another dispatch`);
- this.dispatch();
- }
+ setImmediate(() => {
+ this.executing = null;
+ if (next || this.pending.length) {
+ this.plugin.getLogger("dispatcher").debug(`last process finished, will do another dispatch`);
+ this.dispatch();
+ }
+ });
  })();
  }
  async run(pending) {
@@ -237,7 +235,9 @@ class Dispatcher {
  key: workflow.key,
  eventKey: options.eventKey ?? (0, import_crypto.randomUUID)(),
  stack: options.stack,
- dispatched: deferred ?? false
+ dispatched: deferred ?? false,
+ status: deferred ? import_constants.EXECUTION_STATUS.STARTED : import_constants.EXECUTION_STATUS.QUEUEING,
+ manually: options.manually
  },
  { transaction }
  );
@@ -283,13 +283,15 @@ class Dispatcher {
  try {
  const execution = await this.createExecution(...event);
  if (!(execution == null ? void 0 : execution.dispatched)) {
- if (!this.executing && !this.pending.length) {
+ if (this.plugin.serving() && !this.executing && !this.pending.length) {
  logger.info(`local pending list is empty, adding execution (${execution.id}) to pending list`);
  this.pending.push({ execution });
  } else {
- logger.info(`local pending list is not empty, sending execution (${execution.id}) to queue`);
+ logger.info(
+ `instance is not serving as worker or local pending list is not empty, sending execution (${execution.id}) to queue`
+ );
  if (this.ready) {
- this.plugin.app.backgroundJobManager.publish(`${this.plugin.name}.pendingExecution`, {
+ this.plugin.app.eventQueue.publish(this.plugin.channelPendingExecution, {
  executionId: execution.id
  });
  }
@@ -396,7 +398,3 @@ class Dispatcher {
  return processor;
  }
  }
- // Annotate the CommonJS export names for ESM import in node:
- 0 && (module.exports = {
- WORKER_JOB_WORKFLOW_PROCESS
- });
@@ -18,6 +18,7 @@ import Trigger from './triggers';
  import { InstructionInterface } from './instructions';
  import type { ExecutionModel, WorkflowModel } from './types';
  type ID = number | string;
+ export declare const WORKER_JOB_WORKFLOW_PROCESS = "workflow:process";
  export default class PluginWorkflowServer extends Plugin {
  instructions: Registry<InstructionInterface>;
  triggers: Registry<Trigger>;
@@ -25,6 +26,7 @@ export default class PluginWorkflowServer extends Plugin {
  enabledCache: Map<number, WorkflowModel>;
  snowflake: Snowflake;
  private dispatcher;
+ get channelPendingExecution(): string;
  private loggerCache;
  private meter;
  private checker;
@@ -35,6 +37,7 @@ export default class PluginWorkflowServer extends Plugin {
  private onAfterStart;
  private onBeforeStop;
  handleSyncMessage(message: any): Promise<void>;
+ serving(): boolean;
  /**
  * @experimental
  */
@@ -36,6 +36,7 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
  var Plugin_exports = {};
  __export(Plugin_exports, {
+ WORKER_JOB_WORKFLOW_PROCESS: () => WORKER_JOB_WORKFLOW_PROCESS,
  default: () => PluginWorkflowServer
  });
  module.exports = __toCommonJS(Plugin_exports);
@@ -59,6 +60,7 @@ var import_DestroyInstruction = __toESM(require("./instructions/DestroyInstructi
  var import_QueryInstruction = __toESM(require("./instructions/QueryInstruction"));
  var import_UpdateInstruction = __toESM(require("./instructions/UpdateInstruction"));
  var import_WorkflowRepository = __toESM(require("./repositories/WorkflowRepository"));
+ const WORKER_JOB_WORKFLOW_PROCESS = "workflow:process";
  class PluginWorkflowServer extends import_server.Plugin {
  instructions = new import_utils.Registry();
  triggers = new import_utils.Registry();
@@ -66,6 +68,9 @@ class PluginWorkflowServer extends import_server.Plugin {
  enabledCache = /* @__PURE__ */ new Map();
  snowflake;
  dispatcher = new import_Dispatcher.default(this);
+ get channelPendingExecution() {
+ return `${this.name}.pendingExecution`;
+ }
  loggerCache;
  meter = null;
  checker = null;
@@ -138,7 +143,6 @@ class PluginWorkflowServer extends import_server.Plugin {
  // * add all hooks for enabled workflows
  // * add hooks for create/update[enabled]/delete workflow to add/remove specific hooks
  onAfterStart = async () => {
- this.dispatcher.setReady(true);
  const collection = this.db.getCollection("workflows");
  const workflows = await collection.repository.find({
  appends: ["versionStats"]
@@ -165,11 +169,13 @@ class PluginWorkflowServer extends import_server.Plugin {
  this.app.logger.info("workflow:dispatch");
  this.dispatcher.dispatch();
  });
+ this.dispatcher.setReady(true);
  this.getLogger("dispatcher").info("(starting) check for queueing executions");
  this.dispatcher.dispatch();
- this.dispatcher.setReady(true);
  };
  onBeforeStop = async () => {
+ this.dispatcher.setReady(false);
+ this.app.eventQueue.unsubscribe(this.channelPendingExecution);
  this.app.logger.info(`stopping workflow plugin before app (${this.app.name}) shutdown...`);
  for (const workflow of this.enabledCache.values()) {
  this.toggle(workflow, false, { silent: true });
@@ -202,13 +208,16 @@ class PluginWorkflowServer extends import_server.Plugin {
  }
  }
  }
+ serving() {
+ return this.app.serving(WORKER_JOB_WORKFLOW_PROCESS);
+ }
  /**
  * @experimental
  */
  getLogger(workflowId = "dispatcher") {
  const now = /* @__PURE__ */ new Date();
  const date = `${now.getFullYear()}-${`0${now.getMonth() + 1}`.slice(-2)}-${`0${now.getDate()}`.slice(-2)}`;
- const key = `${date}-${workflowId}}`;
+ const key = `${date}-${workflowId}`;
  if (this.loggerCache.has(key)) {
  return this.loggerCache.get(key);
  }
@@ -279,10 +288,6 @@ class PluginWorkflowServer extends import_server.Plugin {
  this.snowflake = new import_nodejs_snowflake.Snowflake({
  custom_epoch: pluginRecord == null ? void 0 : pluginRecord.createdAt.getTime()
  });
- this.app.backgroundJobManager.subscribe(`${this.name}.pendingExecution`, {
- idle: () => this.app.serving(import_Dispatcher.WORKER_JOB_WORKFLOW_PROCESS) && this.dispatcher.idle,
- process: this.dispatcher.onQueueExecution
- });
  }
  /**
  * @internal
@@ -297,14 +302,23 @@ class PluginWorkflowServer extends import_server.Plugin {
  max: 20,
  updateAgeOnGet: true,
  dispose(logger) {
- logger.end();
+ const cachedLogger = logger;
+ if (!cachedLogger) {
+ return;
+ }
+ cachedLogger.silent = true;
+ if (typeof cachedLogger.close === "function") {
+ cachedLogger.close();
+ }
  }
  });
  this.meter = this.app.telemetry.metric.getMeter();
- const counter = this.meter.createObservableGauge("workflow.events.counter");
- counter.addCallback((result) => {
- result.observe(this.dispatcher.getEventsCount());
- });
+ if (this.meter) {
+ const counter = this.meter.createObservableGauge("workflow.events.counter");
+ counter.addCallback((result) => {
+ result.observe(this.dispatcher.getEventsCount());
+ });
+ }
  this.app.acl.registerSnippet({
  name: `pm.${this.name}.workflows`,
  actions: [
@@ -333,12 +347,18 @@ class PluginWorkflowServer extends import_server.Plugin {
  db.on("workflows.afterDestroy", this.onAfterDestroy);
  this.app.on("afterStart", this.onAfterStart);
  this.app.on("beforeStop", this.onBeforeStop);
+ this.app.eventQueue.subscribe(this.channelPendingExecution, {
+ idle: () => this.serving() && this.dispatcher.idle,
+ process: this.dispatcher.onQueueExecution
+ });
  }
  toggle(workflow, enable, { silent, transaction } = {}) {
  const type = workflow.get("type");
  const trigger = this.triggers.get(type);
  if (!trigger) {
- this.getLogger(workflow.id).error(`trigger type ${workflow.type} of workflow ${workflow.id} is not implemented`);
+ this.getLogger(workflow.id).error(`trigger type ${workflow.type} of workflow ${workflow.id} is not implemented`, {
+ workflowId: workflow.id
+ });
  return;
  }
  const next = enable ?? workflow.get("enabled");
@@ -346,14 +366,20 @@ class PluginWorkflowServer extends import_server.Plugin {
  const prev = workflow.previous();
  if (prev.config) {
  trigger.off({ ...workflow.get(), ...prev });
- this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id} based on configuration before updated`);
+ this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id} based on configuration before updated`, {
+ workflowId: workflow.id
+ });
  }
  trigger.on(workflow);
- this.getLogger(workflow.id).info(`toggle ON workflow ${workflow.id}`);
+ this.getLogger(workflow.id).info(`toggle ON workflow ${workflow.id}`, {
+ workflowId: workflow.id
+ });
  this.enabledCache.set(workflow.id, workflow);
  } else {
  trigger.off(workflow);
- this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id}`);
+ this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id}`, {
+ workflowId: workflow.id
+ });
  this.enabledCache.delete(workflow.id);
  }
  if (!silent) {
@@ -453,3 +479,7 @@ class PluginWorkflowServer extends import_server.Plugin {
  }
  }
  }
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ WORKER_JOB_WORKFLOW_PROCESS
+ });
@@ -144,7 +144,9 @@ class Processor {
  async start() {
  const { execution } = this;
  if (execution.status) {
- this.logger.warn(`execution was ended with status ${execution.status} before, could not be started again`);
+ this.logger.warn(`execution was ended with status ${execution.status} before, could not be started again`, {
+ workflowId: execution.workflowId
+ });
  return;
  }
  await this.prepare();
@@ -158,7 +160,9 @@ class Processor {
  async resume(job) {
  const { execution } = this;
  if (execution.status) {
- this.logger.warn(`execution was ended with status ${execution.status} before, could not be resumed`);
+ this.logger.warn(`execution was ended with status ${execution.status} before, could not be resumed`, {
+ workflowId: execution.workflowId
+ });
  return;
  }
  await this.prepare();
@@ -168,7 +172,7 @@ class Processor {
  async exec(instruction, node, prevJob) {
  let job;
  try {
- this.logger.debug(`config of node`, { data: node.config });
+ this.logger.debug(`config of node`, { data: node.config, workflowId: node.workflowId });
  job = await instruction(node, prevJob, this);
  if (job === null) {
  return this.exit();
@@ -179,7 +183,7 @@ class Processor {
  } catch (err) {
  this.logger.error(
  `execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id}) failed: `,
- err
+ { error: err, workflowId: node.workflowId }
  );
  job = {
  result: err instanceof Error ? {
@@ -199,7 +203,10 @@ class Processor {
  }
  const savedJob = this.saveJob(job);
  this.logger.info(
- `execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id}) finished as status: ${savedJob.status}`
+ `execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id}) finished as status: ${savedJob.status}`,
+ {
+ workflowId: node.workflowId
+ }
  );
  this.logger.debug(`result of node`, { data: savedJob.result });
  if (savedJob.status === import_constants.JOB_STATUS.RESOLVED && node.downstream) {
@@ -217,7 +224,9 @@ class Processor {
  if (typeof instruction.run !== "function") {
  return Promise.reject(new Error("`run` should be implemented for customized execution of the node"));
  }
- this.logger.info(`execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id})`);
+ this.logger.info(`execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id})`, {
+ workflowId: node.workflowId
+ });
  return this.exec(instruction.run.bind(instruction), node, input);
  }
  // parent node should take over the control
@@ -225,7 +234,9 @@ class Processor {
  this.logger.debug(`branch ended at node (${node.id})`);
  const parentNode = this.findBranchParentNode(node);
  if (parentNode) {
- this.logger.debug(`not on main, recall to parent entry node (${node.id})})`);
+ this.logger.debug(`not on main, recall to parent entry node (${node.id})})`, {
+ workflowId: node.workflowId
+ });
  await this.recall(parentNode, job);
  return null;
  }
@@ -242,7 +253,9 @@ class Processor {
  new Error(`"resume" method should be implemented for [${node.type}] instruction of node (#${node.id})`)
  );
  }
- this.logger.info(`execution (${this.execution.id}) resume instruction [${node.type}] for node (${node.id})`);
+ this.logger.info(`execution (${this.execution.id}) resume instruction [${node.type}] for node (${node.id})`, {
+ workflowId: node.workflowId
+ });
  return this.exec(instruction.resume.bind(instruction), node, job);
  }
  async exit(s) {
@@ -295,7 +308,9 @@ class Processor {
  if (this.mainTransaction && this.mainTransaction !== this.transaction) {
  await this.mainTransaction.commit();
  }
- this.logger.info(`execution (${this.execution.id}) exiting with status ${this.execution.status}`);
+ this.logger.info(`execution (${this.execution.id}) exiting with status ${this.execution.status}`, {
+ workflowId: this.execution.workflowId
+ });
  return null;
  }
  /**
@@ -326,7 +341,9 @@ class Processor {
  this.lastSavedJob = job;
  this.jobsMapByNodeKey[job.nodeKey] = job;
  this.jobResultsMapByNodeKey[job.nodeKey] = job.result;
- this.logger.debug(`job added to save list: ${JSON.stringify(job)}`);
+ this.logger.debug(`job added to save list: ${JSON.stringify(job)}`, {
+ workflowId: this.execution.workflowId
+ });
  return job;
  }
  /**