@nocobase/plugin-workflow 2.0.0-alpha.4 → 2.0.0-alpha.40

This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
@@ -98,9 +98,6 @@ export default class extends Instruction {
  branching: ({ rejectOnFalse }?: {
  rejectOnFalse?: boolean;
  }) => false | ({
- label: string;
- value: boolean;
- } | {
  label: string;
  value: 1;
  } | {
@@ -140,9 +140,9 @@ export declare const executionSchema: {
  unique?: undefined;
  onDelete?: undefined;
  } | {
- interface: string;
  type: string;
  name: string;
+ interface: string;
  uiSchema: {
  type: string;
  title: string;
@@ -129,9 +129,9 @@ declare const _default: {
  unique?: undefined;
  onDelete?: undefined;
  } | {
- interface: string;
  type: string;
  name: string;
+ interface: string;
  uiSchema: {
  type: string;
  title: string;
@@ -115,9 +115,9 @@ var executions_default = {
  name: "output"
  },
  {
- interface: "createdAt",
  type: "datetime",
  name: "createdAt",
+ interface: "createdAt",
  uiSchema: {
  type: "datetime",
  title: `{{t("Triggered at", { ns: "${import_constants.NAMESPACE}" })}}`,
@@ -125,6 +125,18 @@ var executions_default = {
  "x-component-props": {},
  "x-read-pretty": true
  }
+ },
+ {
+ type: "boolean",
+ name: "manually",
+ interface: "checkbox",
+ uiSchema: {
+ type: "boolean",
+ title: `{{t("Triggered manually", { ns: "${import_constants.NAMESPACE}" })}}`,
+ "x-component": "Checkbox",
+ "x-component-props": {},
+ "x-read-pretty": true
+ }
  }
  ],
  indexes: [{ fields: ["dispatched", "id"] }]
@@ -11,8 +11,8 @@ module.exports = {
  "react": "18.2.0",
  "@formily/core": "2.3.7",
  "@formily/react": "2.3.7",
- "@nocobase/client": "2.0.0-alpha.4",
- "@nocobase/utils": "2.0.0-alpha.4",
+ "@nocobase/client": "2.0.0-alpha.40",
+ "@nocobase/utils": "2.0.0-alpha.40",
  "antd": "5.24.2",
  "@ant-design/icons": "5.6.1",
  "react-router-dom": "6.30.1",
@@ -20,17 +20,17 @@ module.exports = {
  "lodash": "4.17.21",
  "@dnd-kit/core": "6.1.0",
  "@formily/shared": "2.3.7",
- "@nocobase/plugin-mobile": "2.0.0-alpha.4",
+ "@nocobase/plugin-mobile": "2.0.0-alpha.40",
  "sequelize": "6.35.2",
- "@nocobase/database": "2.0.0-alpha.4",
- "@nocobase/server": "2.0.0-alpha.4",
- "@nocobase/data-source-manager": "2.0.0-alpha.4",
- "@nocobase/logger": "2.0.0-alpha.4",
- "@nocobase/evaluators": "2.0.0-alpha.4",
+ "@nocobase/database": "2.0.0-alpha.40",
+ "@nocobase/server": "2.0.0-alpha.40",
+ "@nocobase/data-source-manager": "2.0.0-alpha.40",
+ "@nocobase/logger": "2.0.0-alpha.40",
+ "@nocobase/evaluators": "2.0.0-alpha.40",
  "@formily/antd-v5": "1.2.3",
  "@formily/reactive": "2.3.7",
- "@nocobase/actions": "2.0.0-alpha.4",
+ "@nocobase/actions": "2.0.0-alpha.40",
  "dayjs": "1.11.13",
- "@nocobase/plugin-workflow-test": "2.0.0-alpha.4",
- "@nocobase/test": "2.0.0-alpha.4"
+ "@nocobase/plugin-workflow-test": "2.0.0-alpha.40",
+ "@nocobase/test": "2.0.0-alpha.40"
  };
@@ -179,16 +179,19 @@
  "Calculation result": "计算结果",
  "True": "真",
  "False": "假",
- "concat": "连接",
+ "Concat": "连接",
  "Condition": "条件判断",
  "Based on boolean result of the calculation to determine whether to \"continue\" or \"exit\" the process, or continue on different branches of \"yes\" and \"no\".":
  "基于计算结果的真假来决定“继续”或“退出”流程,或者在“是”与“否”的分支上分别继续。",
  "Mode": "模式",
+ "Yes": "是",
+ "No": "否",
  "Continue when \"Yes\"": "“是”则继续",
  "Branch into \"Yes\" and \"No\"": "“是”和“否”分别继续",
  "Condition expression": "条件表达式",
- "Inside of \"Yes\" branch": "“是”分支内",
- "Inside of \"No\" branch": "“否”分支内",
+ "Inside of \"{{branchName}}\" branch": "“{{branchName}}”分支内",
+ "\"{{branchName}}\" branch": "“{{branchName}}”分支",
+ "Branch {{index}}": "分支 {{index}}",
  "Create record": "新增数据",
  "Add new record to a collection. You can use variables from upstream nodes to assign values to fields.":
  "向一个数据表中添加新的数据。可以使用上游节点里的变量为字段赋值。",
@@ -235,7 +238,10 @@
  "Succeeded": "成功",
  "Test run": "测试执行",
  "Test run will do the actual data manipulating or API calling, please use with caution.": "测试执行会进行实际的数据操作或 API 调用,请谨慎使用。",
+ "Replace variables": "替换变量",
  "No variable": "无变量",
+ "Result": "结果",
+ "Log": "日志",

  "Add node": "添加节点",
  "Move all downstream nodes to": "将所有下游节点移至",
@@ -246,6 +252,11 @@
  "New version enabled": "已启用新版本",
  "Workflow is not exists": "工作流不存在",

+ "Delete node": "删除节点",
+ "Branch to keep": "保留分支",
+ "Delete all": "删除全部",
+ "Keep": "保留",
+
  "Select users": "选择用户",
  "Query users": "查询用户",
  "Add": "添加",
@@ -1 +1 @@
- {"name":"cron-parser","version":"4.4.0","description":"Node.js library for parsing crontab instructions","main":"lib/parser.js","types":"index.d.ts","typesVersions":{"<4.1":{"*":["types/ts3/*"]}},"directories":{"test":"test"},"scripts":{"test:tsd":"tsd","test:unit":"TZ=UTC tap ./test/*.js","test:cover":"TZ=UTC tap --coverage-report=html ./test/*.js","lint":"eslint .","lint:fix":"eslint --fix .","test":"npm run lint && npm run test:unit && npm run test:tsd"},"repository":{"type":"git","url":"https://github.com/harrisiirak/cron-parser.git"},"keywords":["cron","crontab","parser"],"author":"Harri Siirak","contributors":["Nicholas Clawson","Daniel Prentis <daniel@salsitasoft.com>","Renault John Lecoultre","Richard Astbury <richard.astbury@gmail.com>","Meaglin Wasabi <Meaglin.wasabi@gmail.com>","Mike Kusold <hello@mikekusold.com>","Alex Kit <alex.kit@atmajs.com>","Santiago Gimeno <santiago.gimeno@gmail.com>","Daniel <darc.tec@gmail.com>","Christian Steininger <christian.steininger.cs@gmail.com>","Mykola Piskovyi <m.piskovyi@gmail.com>","Brian Vaughn <brian.david.vaughn@gmail.com>","Nicholas Clawson <nickclaw@gmail.com>","Yasuhiroki <yasuhiroki.duck@gmail.com>","Nicholas Clawson <nickclaw@gmail.com>","Brendan Warkentin <faazshift@gmail.com>","Charlie Fish <fishcharlie.code@gmail.com>","Ian Graves <ian+diskimage@iangrav.es>","Andy Thompson <me@andytson.com>","Regev Brody <regevbr@gmail.com>"],"license":"MIT","dependencies":{"luxon":"^1.28.0"},"devDependencies":{"eslint":"^8.2.0","sinon":"^10.0.0","tap":"^16.0.1","tsd":"^0.19.0"},"engines":{"node":">=0.8"},"browser":{"fs":false},"tap":{"check-coverage":false},"tsd":{"directory":"test","compilerOptions":{"lib":["es2017","dom"]}},"_lastModified":"2025-10-05T10:00:23.201Z"}
+ {"name":"cron-parser","version":"4.4.0","description":"Node.js library for parsing crontab instructions","main":"lib/parser.js","types":"index.d.ts","typesVersions":{"<4.1":{"*":["types/ts3/*"]}},"directories":{"test":"test"},"scripts":{"test:tsd":"tsd","test:unit":"TZ=UTC tap ./test/*.js","test:cover":"TZ=UTC tap --coverage-report=html ./test/*.js","lint":"eslint .","lint:fix":"eslint --fix .","test":"npm run lint && npm run test:unit && npm run test:tsd"},"repository":{"type":"git","url":"https://github.com/harrisiirak/cron-parser.git"},"keywords":["cron","crontab","parser"],"author":"Harri Siirak","contributors":["Nicholas Clawson","Daniel Prentis <daniel@salsitasoft.com>","Renault John Lecoultre","Richard Astbury <richard.astbury@gmail.com>","Meaglin Wasabi <Meaglin.wasabi@gmail.com>","Mike Kusold <hello@mikekusold.com>","Alex Kit <alex.kit@atmajs.com>","Santiago Gimeno <santiago.gimeno@gmail.com>","Daniel <darc.tec@gmail.com>","Christian Steininger <christian.steininger.cs@gmail.com>","Mykola Piskovyi <m.piskovyi@gmail.com>","Brian Vaughn <brian.david.vaughn@gmail.com>","Nicholas Clawson <nickclaw@gmail.com>","Yasuhiroki <yasuhiroki.duck@gmail.com>","Nicholas Clawson <nickclaw@gmail.com>","Brendan Warkentin <faazshift@gmail.com>","Charlie Fish <fishcharlie.code@gmail.com>","Ian Graves <ian+diskimage@iangrav.es>","Andy Thompson <me@andytson.com>","Regev Brody <regevbr@gmail.com>"],"license":"MIT","dependencies":{"luxon":"^1.28.0"},"devDependencies":{"eslint":"^8.2.0","sinon":"^10.0.0","tap":"^16.0.1","tsd":"^0.19.0"},"engines":{"node":">=0.8"},"browser":{"fs":false},"tap":{"check-coverage":false},"tsd":{"directory":"test","compilerOptions":{"lib":["es2017","dom"]}},"_lastModified":"2025-11-18T07:31:33.481Z"}
@@ -1 +1 @@
- {"name":"lru-cache","description":"A cache object that deletes the least-recently-used items.","version":"8.0.5","author":"Isaac Z. Schlueter <i@izs.me>","keywords":["mru","lru","cache"],"sideEffects":false,"scripts":{"build":"npm run prepare","preprepare":"rm -rf dist","prepare":"tsc -p tsconfig.json && tsc -p tsconfig-esm.json","postprepare":"bash fixup.sh","pretest":"npm run prepare","presnap":"npm run prepare","test":"c8 tap","snap":"c8 tap","preversion":"npm test","postversion":"npm publish","prepublishOnly":"git push origin --follow-tags","format":"prettier --write .","typedoc":"typedoc --tsconfig tsconfig-esm.json ./src/*.ts","benchmark-results-typedoc":"bash scripts/benchmark-results-typedoc.sh","prebenchmark":"npm run prepare","benchmark":"make -C benchmark","preprofile":"npm run prepare","profile":"make -C benchmark profile"},"main":"./dist/cjs/index-cjs.js","module":"./dist/mjs/index.js","types":"./dist/mjs/index.d.ts","exports":{"./min":{"import":{"types":"./dist/mjs/index.d.ts","default":"./dist/mjs/index.min.js"},"require":{"types":"./dist/cjs/index.d.ts","default":"./dist/cjs/index.min.js"}},".":{"import":{"types":"./dist/mjs/index.d.ts","default":"./dist/mjs/index.js"},"require":{"types":"./dist/cjs/index.d.ts","default":"./dist/cjs/index-cjs.js"}}},"repository":"git://github.com/isaacs/node-lru-cache.git","devDependencies":{"@size-limit/preset-small-lib":"^7.0.8","@types/node":"^17.0.31","@types/tap":"^15.0.6","benchmark":"^2.1.4","c8":"^7.11.2","clock-mock":"^1.0.6","esbuild":"^0.17.11","eslint-config-prettier":"^8.5.0","marked":"^4.2.12","mkdirp":"^2.1.5","prettier":"^2.6.2","size-limit":"^7.0.8","tap":"^16.3.4","ts-node":"^10.7.0","tslib":"^2.4.0","typedoc":"^0.23.24","typescript":"^4.6.4"},"license":"ISC","files":["dist"],"engines":{"node":">=16.14"},"prettier":{"semi":false,"printWidth":70,"tabWidth":2,"useTabs":false,"singleQuote":true,"jsxSingleQuote":false,"bracketSameLine":true,"arrowParens":"avoid","endOfLine":"lf"},"tap":{"coverage":false,"node-arg":["--expose-gc","--no-warnings","--loader","ts-node/esm"],"ts":false},"size-limit":[{"path":"./dist/mjs/index.js"}],"_lastModified":"2025-10-05T10:00:22.839Z"}
+ {"name":"lru-cache","description":"A cache object that deletes the least-recently-used items.","version":"8.0.5","author":"Isaac Z. Schlueter <i@izs.me>","keywords":["mru","lru","cache"],"sideEffects":false,"scripts":{"build":"npm run prepare","preprepare":"rm -rf dist","prepare":"tsc -p tsconfig.json && tsc -p tsconfig-esm.json","postprepare":"bash fixup.sh","pretest":"npm run prepare","presnap":"npm run prepare","test":"c8 tap","snap":"c8 tap","preversion":"npm test","postversion":"npm publish","prepublishOnly":"git push origin --follow-tags","format":"prettier --write .","typedoc":"typedoc --tsconfig tsconfig-esm.json ./src/*.ts","benchmark-results-typedoc":"bash scripts/benchmark-results-typedoc.sh","prebenchmark":"npm run prepare","benchmark":"make -C benchmark","preprofile":"npm run prepare","profile":"make -C benchmark profile"},"main":"./dist/cjs/index-cjs.js","module":"./dist/mjs/index.js","types":"./dist/mjs/index.d.ts","exports":{"./min":{"import":{"types":"./dist/mjs/index.d.ts","default":"./dist/mjs/index.min.js"},"require":{"types":"./dist/cjs/index.d.ts","default":"./dist/cjs/index.min.js"}},".":{"import":{"types":"./dist/mjs/index.d.ts","default":"./dist/mjs/index.js"},"require":{"types":"./dist/cjs/index.d.ts","default":"./dist/cjs/index-cjs.js"}}},"repository":"git://github.com/isaacs/node-lru-cache.git","devDependencies":{"@size-limit/preset-small-lib":"^7.0.8","@types/node":"^17.0.31","@types/tap":"^15.0.6","benchmark":"^2.1.4","c8":"^7.11.2","clock-mock":"^1.0.6","esbuild":"^0.17.11","eslint-config-prettier":"^8.5.0","marked":"^4.2.12","mkdirp":"^2.1.5","prettier":"^2.6.2","size-limit":"^7.0.8","tap":"^16.3.4","ts-node":"^10.7.0","tslib":"^2.4.0","typedoc":"^0.23.24","typescript":"^4.6.4"},"license":"ISC","files":["dist"],"engines":{"node":">=16.14"},"prettier":{"semi":false,"printWidth":70,"tabWidth":2,"useTabs":false,"singleQuote":true,"jsxSingleQuote":false,"bracketSameLine":true,"arrowParens":"avoid","endOfLine":"lf"},"tap":{"coverage":false,"node-arg":["--expose-gc","--no-warnings","--loader","ts-node/esm"],"ts":false},"size-limit":[{"path":"./dist/mjs/index.js"}],"_lastModified":"2025-11-18T07:31:33.104Z"}
@@ -1 +1 @@
- {"name":"nodejs-snowflake","collaborators":["Utkarsh Srivastava <utkarsh@sagacious.dev>"],"description":"Generate time sortable 64 bits unique ids for distributed systems (inspired from twitter snowflake)","version":"2.0.1","license":"Apache 2.0","repository":{"type":"git","url":"https://github.com/utkarsh-pro/nodejs-snowflake.git"},"files":["nodejs_snowflake_bg.wasm","nodejs_snowflake.js","nodejs_snowflake.d.ts"],"main":"nodejs_snowflake.js","types":"nodejs_snowflake.d.ts","_lastModified":"2025-10-05T10:00:22.636Z"}
+ {"name":"nodejs-snowflake","collaborators":["Utkarsh Srivastava <utkarsh@sagacious.dev>"],"description":"Generate time sortable 64 bits unique ids for distributed systems (inspired from twitter snowflake)","version":"2.0.1","license":"Apache 2.0","repository":{"type":"git","url":"https://github.com/utkarsh-pro/nodejs-snowflake.git"},"files":["nodejs_snowflake_bg.wasm","nodejs_snowflake.js","nodejs_snowflake.d.ts"],"main":"nodejs_snowflake.js","types":"nodejs_snowflake.d.ts","_lastModified":"2025-11-18T07:31:32.885Z"}
@@ -14,7 +14,7 @@ import type PluginWorkflowServer from './Plugin';
  type Pending = {
  execution: ExecutionModel;
  job?: JobModel;
- force?: boolean;
+ loaded?: boolean;
  };
  export type EventOptions = {
  eventKey?: string;
@@ -38,7 +38,6 @@ export default class Dispatcher {
  constructor(plugin: PluginWorkflowServer);
  readonly onQueueExecution: QueueEventOptions['process'];
  setReady(ready: boolean): void;
- isReady(): boolean;
  getEventsCount(): number;
  trigger(workflow: WorkflowModel, context: object, options?: EventOptions): void | Promise<Processor | null>;
  resume(job: any): Promise<void>;
@@ -45,7 +45,7 @@ class Dispatcher {
  events = [];
  eventsCount = 0;
  get idle() {
- return !this.executing && !this.pending.length && !this.events.length;
+ return this.ready && !this.executing && !this.pending.length && !this.events.length;
  }
  onQueueExecution = async (event) => {
  const ExecutionRepo = this.plugin.db.getRepository("executions");
@@ -62,9 +62,6 @@ class Dispatcher {
  setReady(ready) {
  this.ready = ready;
  }
- isReady() {
- return this.ready;
- }
  getEventsCount() {
  return this.eventsCount;
  }
@@ -112,14 +109,14 @@ class Dispatcher {
  execution = await job.getExecution();
  }
  this.plugin.getLogger(execution.workflowId).info(`execution (${execution.id}) resuming from job (${job.id}) added to pending list`);
- this.run({ execution, job, force: true });
+ this.run({ execution, job, loaded: true });
  }
  async start(execution) {
  if (execution.status) {
  return;
  }
  this.plugin.getLogger(execution.workflowId).info(`starting deferred execution (${execution.id})`);
- this.run({ execution, force: true });
+ this.run({ execution, loaded: true });
  }
  async beforeStop() {
  this.ready = false;
@@ -135,10 +132,6 @@ class Dispatcher {
  this.plugin.getLogger("dispatcher").warn(`app is not ready, new dispatching will be ignored`);
  return;
  }
- if (!this.plugin.app.serving(WORKER_JOB_WORKFLOW_PROCESS)) {
- this.plugin.getLogger("dispatcher").warn(`${WORKER_JOB_WORKFLOW_PROCESS} is not serving, new dispatching will be ignored`);
- return;
- }
  if (this.executing) {
  this.plugin.getLogger("dispatcher").warn(`workflow executing is not finished, new dispatching will be ignored`);
  return;
@@ -151,25 +144,31 @@ class Dispatcher {
  let execution = null;
  if (this.pending.length) {
  const pending = this.pending.shift();
- execution = pending.force ? pending.execution : await this.acquirePendingExecution(pending.execution);
+ execution = pending.loaded ? pending.execution : await this.acquirePendingExecution(pending.execution);
  if (execution) {
  next = [execution, pending.job];
  this.plugin.getLogger(next[0].workflowId).info(`pending execution (${next[0].id}) ready to process`);
  }
  } else {
- execution = await this.acquireQueueingExecution();
- if (execution) {
- next = [execution];
+ if (this.plugin.serving()) {
+ execution = await this.acquireQueueingExecution();
+ if (execution) {
+ next = [execution];
+ }
+ } else {
+ this.plugin.getLogger("dispatcher").warn(`${WORKER_JOB_WORKFLOW_PROCESS} is not serving on this instance, new dispatching will be ignored`);
  }
  }
  if (next) {
  await this.process(...next);
  }
- this.executing = null;
- if (next || this.pending.length) {
- this.plugin.getLogger("dispatcher").debug(`last process finished, will do another dispatch`);
- this.dispatch();
- }
+ setImmediate(() => {
+ this.executing = null;
+ if (next || this.pending.length) {
+ this.plugin.getLogger("dispatcher").debug(`last process finished, will do another dispatch`);
+ this.dispatch();
+ }
+ });
  })();
  }
  async run(pending) {
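
The hunk above replaces the synchronous re-dispatch at the end of the dispatch loop with one deferred via setImmediate, so the executing flag is only cleared after the current call stack unwinds. A minimal TypeScript sketch of that pattern follows; the class and member names (DispatchLoop, pending, process) are hypothetical stand-ins, not the plugin's real internals.

    // Minimal sketch of the deferred re-dispatch pattern shown in the hunk above.
    class DispatchLoop {
      private executing: Promise<void> | null = null;
      private pending: string[] = [];

      dispatch(): void {
        if (this.executing) {
          return; // a round is already in flight
        }
        this.executing = (async () => {
          const item = this.pending.shift();
          if (item !== undefined) {
            await this.process(item);
          }
          // Clear the flag and schedule the next round on a fresh macrotask,
          // instead of recursing synchronously inside the same async IIFE.
          setImmediate(() => {
            this.executing = null;
            if (this.pending.length) {
              this.dispatch();
            }
          });
        })();
      }

      private async process(item: string): Promise<void> {
        // placeholder for the actual execution processing
      }
    }
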
@@ -237,7 +236,9 @@ class Dispatcher {
  key: workflow.key,
  eventKey: options.eventKey ?? (0, import_crypto.randomUUID)(),
  stack: options.stack,
- dispatched: deferred ?? false
+ dispatched: deferred ?? false,
+ status: deferred ? import_constants.EXECUTION_STATUS.STARTED : import_constants.EXECUTION_STATUS.QUEUEING,
+ manually: options.manually
  },
  { transaction }
  );
@@ -283,11 +284,13 @@ class Dispatcher {
  try {
  const execution = await this.createExecution(...event);
  if (!(execution == null ? void 0 : execution.dispatched)) {
- if (!this.executing && !this.pending.length) {
+ if (this.plugin.serving() && !this.executing && !this.pending.length) {
  logger.info(`local pending list is empty, adding execution (${execution.id}) to pending list`);
  this.pending.push({ execution });
  } else {
- logger.info(`local pending list is not empty, sending execution (${execution.id}) to queue`);
+ logger.info(
+ `instance is not serving as worker or local pending list is not empty, sending execution (${execution.id}) to queue`
+ );
  if (this.ready) {
  this.plugin.app.backgroundJobManager.publish(`${this.plugin.name}.pendingExecution`, {
  executionId: execution.id
@@ -35,6 +35,7 @@ export default class PluginWorkflowServer extends Plugin {
  private onAfterStart;
  private onBeforeStop;
  handleSyncMessage(message: any): Promise<void>;
+ serving(): boolean;
  /**
  * @experimental
  */
@@ -138,7 +138,6 @@ class PluginWorkflowServer extends import_server.Plugin {
  // * add all hooks for enabled workflows
  // * add hooks for create/update[enabled]/delete workflow to add/remove specific hooks
  onAfterStart = async () => {
- this.dispatcher.setReady(true);
  const collection = this.db.getCollection("workflows");
  const workflows = await collection.repository.find({
  appends: ["versionStats"]
@@ -165,11 +164,12 @@ class PluginWorkflowServer extends import_server.Plugin {
  this.app.logger.info("workflow:dispatch");
  this.dispatcher.dispatch();
  });
+ this.dispatcher.setReady(true);
  this.getLogger("dispatcher").info("(starting) check for queueing executions");
  this.dispatcher.dispatch();
- this.dispatcher.setReady(true);
  };
  onBeforeStop = async () => {
+ this.dispatcher.setReady(false);
  this.app.logger.info(`stopping workflow plugin before app (${this.app.name}) shutdown...`);
  for (const workflow of this.enabledCache.values()) {
  this.toggle(workflow, false, { silent: true });
@@ -202,13 +202,16 @@ class PluginWorkflowServer extends import_server.Plugin {
  }
  }
  }
+ serving() {
+ return this.app.serving(import_Dispatcher.WORKER_JOB_WORKFLOW_PROCESS);
+ }
  /**
  * @experimental
  */
  getLogger(workflowId = "dispatcher") {
  const now = /* @__PURE__ */ new Date();
  const date = `${now.getFullYear()}-${`0${now.getMonth() + 1}`.slice(-2)}-${`0${now.getDate()}`.slice(-2)}`;
- const key = `${date}-${workflowId}}`;
+ const key = `${date}-${workflowId}`;
  if (this.loggerCache.has(key)) {
  return this.loggerCache.get(key);
  }
@@ -279,10 +282,6 @@ class PluginWorkflowServer extends import_server.Plugin {
  this.snowflake = new import_nodejs_snowflake.Snowflake({
  custom_epoch: pluginRecord == null ? void 0 : pluginRecord.createdAt.getTime()
  });
- this.app.backgroundJobManager.subscribe(`${this.name}.pendingExecution`, {
- idle: () => this.app.serving(import_Dispatcher.WORKER_JOB_WORKFLOW_PROCESS) && this.dispatcher.idle,
- process: this.dispatcher.onQueueExecution
- });
  }
  /**
  * @internal
@@ -297,14 +296,23 @@ class PluginWorkflowServer extends import_server.Plugin {
  max: 20,
  updateAgeOnGet: true,
  dispose(logger) {
- logger.end();
+ const cachedLogger = logger;
+ if (!cachedLogger) {
+ return;
+ }
+ cachedLogger.silent = true;
+ if (typeof cachedLogger.close === "function") {
+ cachedLogger.close();
+ }
  }
  });
  this.meter = this.app.telemetry.metric.getMeter();
- const counter = this.meter.createObservableGauge("workflow.events.counter");
- counter.addCallback((result) => {
- result.observe(this.dispatcher.getEventsCount());
- });
+ if (this.meter) {
+ const counter = this.meter.createObservableGauge("workflow.events.counter");
+ counter.addCallback((result) => {
+ result.observe(this.dispatcher.getEventsCount());
+ });
+ }
  this.app.acl.registerSnippet({
  name: `pm.${this.name}.workflows`,
  actions: [
@@ -333,12 +341,20 @@ class PluginWorkflowServer extends import_server.Plugin {
  db.on("workflows.afterDestroy", this.onAfterDestroy);
  this.app.on("afterStart", this.onAfterStart);
  this.app.on("beforeStop", this.onBeforeStop);
+ if (this.serving()) {
+ this.app.backgroundJobManager.subscribe(`${this.name}.pendingExecution`, {
+ idle: () => this.dispatcher.idle,
+ process: this.dispatcher.onQueueExecution
+ });
+ }
  }
  toggle(workflow, enable, { silent, transaction } = {}) {
  const type = workflow.get("type");
  const trigger = this.triggers.get(type);
  if (!trigger) {
- this.getLogger(workflow.id).error(`trigger type ${workflow.type} of workflow ${workflow.id} is not implemented`);
+ this.getLogger(workflow.id).error(`trigger type ${workflow.type} of workflow ${workflow.id} is not implemented`, {
+ workflowId: workflow.id
+ });
  return;
  }
  const next = enable ?? workflow.get("enabled");
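
Several hunks in this version route worker-only behavior through the new serving() helper: the dispatcher's queue acquisition, the local pending-list check, and (as shown above) the background-queue subscription now consult it instead of checking app.serving(WORKER_JOB_WORKFLOW_PROCESS) at each call site. A reduced TypeScript sketch of that gating follows; AppLike, QueueLike, and the channel name are stand-ins, not NocoBase's actual interfaces.

    // Reduced sketch of gating queue consumption on a single "serving" check.
    const WORKER_JOB_WORKFLOW_PROCESS = 'workflow:process';

    interface AppLike {
      serving(feature: string): boolean;
    }

    interface QueueLike {
      subscribe(
        channel: string,
        handlers: { idle: () => boolean; process: (event: unknown) => Promise<void> },
      ): void;
    }

    class WorkflowServingSketch {
      constructor(
        private app: AppLike,
        private queue: QueueLike,
        private dispatcher: { idle: boolean; onQueueExecution: (event: unknown) => Promise<void> },
      ) {}

      // Single place to decide whether this instance acts as a workflow worker.
      serving(): boolean {
        return this.app.serving(WORKER_JOB_WORKFLOW_PROCESS);
      }

      load(): void {
        // Only worker instances consume the pending-execution queue;
        // non-worker instances still publish to it when events arrive.
        if (this.serving()) {
          this.queue.subscribe('workflow.pendingExecution', {
            idle: () => this.dispatcher.idle,
            process: this.dispatcher.onQueueExecution,
          });
        }
      }
    }
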
@@ -346,14 +362,20 @@ class PluginWorkflowServer extends import_server.Plugin {
  const prev = workflow.previous();
  if (prev.config) {
  trigger.off({ ...workflow.get(), ...prev });
- this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id} based on configuration before updated`);
+ this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id} based on configuration before updated`, {
+ workflowId: workflow.id
+ });
  }
  trigger.on(workflow);
- this.getLogger(workflow.id).info(`toggle ON workflow ${workflow.id}`);
+ this.getLogger(workflow.id).info(`toggle ON workflow ${workflow.id}`, {
+ workflowId: workflow.id
+ });
  this.enabledCache.set(workflow.id, workflow);
  } else {
  trigger.off(workflow);
- this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id}`);
+ this.getLogger(workflow.id).info(`toggle OFF workflow ${workflow.id}`, {
+ workflowId: workflow.id
+ });
  this.enabledCache.delete(workflow.id);
  }
  if (!silent) {
@@ -144,7 +144,9 @@ class Processor {
  async start() {
  const { execution } = this;
  if (execution.status) {
- this.logger.warn(`execution was ended with status ${execution.status} before, could not be started again`);
+ this.logger.warn(`execution was ended with status ${execution.status} before, could not be started again`, {
+ workflowId: execution.workflowId
+ });
  return;
  }
  await this.prepare();
@@ -158,7 +160,9 @@ class Processor {
  async resume(job) {
  const { execution } = this;
  if (execution.status) {
- this.logger.warn(`execution was ended with status ${execution.status} before, could not be resumed`);
+ this.logger.warn(`execution was ended with status ${execution.status} before, could not be resumed`, {
+ workflowId: execution.workflowId
+ });
  return;
  }
  await this.prepare();
@@ -168,7 +172,7 @@ class Processor {
  async exec(instruction, node, prevJob) {
  let job;
  try {
- this.logger.debug(`config of node`, { data: node.config });
+ this.logger.debug(`config of node`, { data: node.config, workflowId: node.workflowId });
  job = await instruction(node, prevJob, this);
  if (job === null) {
  return this.exit();
@@ -179,7 +183,7 @@ class Processor {
  } catch (err) {
  this.logger.error(
  `execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id}) failed: `,
- err
+ { error: err, workflowId: node.workflowId }
  );
  job = {
  result: err instanceof Error ? {
@@ -199,7 +203,10 @@ class Processor {
  }
  const savedJob = this.saveJob(job);
  this.logger.info(
- `execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id}) finished as status: ${savedJob.status}`
+ `execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id}) finished as status: ${savedJob.status}`,
+ {
+ workflowId: node.workflowId
+ }
  );
  this.logger.debug(`result of node`, { data: savedJob.result });
  if (savedJob.status === import_constants.JOB_STATUS.RESOLVED && node.downstream) {
@@ -217,7 +224,9 @@ class Processor {
  if (typeof instruction.run !== "function") {
  return Promise.reject(new Error("`run` should be implemented for customized execution of the node"));
  }
- this.logger.info(`execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id})`);
+ this.logger.info(`execution (${this.execution.id}) run instruction [${node.type}] for node (${node.id})`, {
+ workflowId: node.workflowId
+ });
  return this.exec(instruction.run.bind(instruction), node, input);
  }
  // parent node should take over the control
@@ -225,7 +234,9 @@ class Processor {
  this.logger.debug(`branch ended at node (${node.id})`);
  const parentNode = this.findBranchParentNode(node);
  if (parentNode) {
- this.logger.debug(`not on main, recall to parent entry node (${node.id})})`);
+ this.logger.debug(`not on main, recall to parent entry node (${node.id})})`, {
+ workflowId: node.workflowId
+ });
  await this.recall(parentNode, job);
  return null;
  }
@@ -242,7 +253,9 @@ class Processor {
  new Error(`"resume" method should be implemented for [${node.type}] instruction of node (#${node.id})`)
  );
  }
- this.logger.info(`execution (${this.execution.id}) resume instruction [${node.type}] for node (${node.id})`);
+ this.logger.info(`execution (${this.execution.id}) resume instruction [${node.type}] for node (${node.id})`, {
+ workflowId: node.workflowId
+ });
  return this.exec(instruction.resume.bind(instruction), node, job);
  }
  async exit(s) {
@@ -295,7 +308,9 @@ class Processor {
  if (this.mainTransaction && this.mainTransaction !== this.transaction) {
  await this.mainTransaction.commit();
  }
- this.logger.info(`execution (${this.execution.id}) exiting with status ${this.execution.status}`);
+ this.logger.info(`execution (${this.execution.id}) exiting with status ${this.execution.status}`, {
+ workflowId: this.execution.workflowId
+ });
  return null;
  }
  /**
@@ -326,7 +341,9 @@ class Processor {
  this.lastSavedJob = job;
  this.jobsMapByNodeKey[job.nodeKey] = job;
  this.jobResultsMapByNodeKey[job.nodeKey] = job.result;
- this.logger.debug(`job added to save list: ${JSON.stringify(job)}`);
+ this.logger.debug(`job added to save list: ${JSON.stringify(job)}`, {
+ workflowId: this.execution.workflowId
+ });
  return job;
  }
  /**