@dbos-inc/dbos-sdk 2.2.10-preview.g90e74a1e32 → 2.3.9-preview

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (175)
  1. package/.husky/pre-commit +1 -0
  2. package/.prettierignore +3 -0
  3. package/.prettierrc +9 -0
  4. package/CODE_OF_CONDUCT.md +24 -18
  5. package/CONTRIBUTING.md +12 -10
  6. package/README.md +2 -2
  7. package/compose.yaml +17 -17
  8. package/dbos-config.schema.json +4 -13
  9. package/dist/schemas/user_db_schema.d.ts.map +1 -1
  10. package/dist/schemas/user_db_schema.js.map +1 -1
  11. package/dist/src/context.d.ts +12 -12
  12. package/dist/src/context.d.ts.map +1 -1
  13. package/dist/src/context.js +9 -9
  14. package/dist/src/context.js.map +1 -1
  15. package/dist/src/data_validation.d.ts +1 -1
  16. package/dist/src/data_validation.d.ts.map +1 -1
  17. package/dist/src/data_validation.js +14 -8
  18. package/dist/src/data_validation.js.map +1 -1
  19. package/dist/src/dbos-executor.d.ts +4 -3
  20. package/dist/src/dbos-executor.d.ts.map +1 -1
  21. package/dist/src/dbos-executor.js +152 -141
  22. package/dist/src/dbos-executor.js.map +1 -1
  23. package/dist/src/dbos-runtime/cli.d.ts +3 -3
  24. package/dist/src/dbos-runtime/cli.d.ts.map +1 -1
  25. package/dist/src/dbos-runtime/cli.js +80 -39
  26. package/dist/src/dbos-runtime/cli.js.map +1 -1
  27. package/dist/src/dbos-runtime/cloudutils/authentication.d.ts +1 -1
  28. package/dist/src/dbos-runtime/cloudutils/authentication.d.ts.map +1 -1
  29. package/dist/src/dbos-runtime/cloudutils/authentication.js +14 -14
  30. package/dist/src/dbos-runtime/cloudutils/authentication.js.map +1 -1
  31. package/dist/src/dbos-runtime/cloudutils/cloudutils.d.ts +2 -2
  32. package/dist/src/dbos-runtime/cloudutils/cloudutils.d.ts.map +1 -1
  33. package/dist/src/dbos-runtime/cloudutils/cloudutils.js +32 -32
  34. package/dist/src/dbos-runtime/cloudutils/cloudutils.js.map +1 -1
  35. package/dist/src/dbos-runtime/cloudutils/databases.d.ts +2 -2
  36. package/dist/src/dbos-runtime/cloudutils/databases.d.ts.map +1 -1
  37. package/dist/src/dbos-runtime/cloudutils/databases.js +25 -21
  38. package/dist/src/dbos-runtime/cloudutils/databases.js.map +1 -1
  39. package/dist/src/dbos-runtime/commands.d.ts +1 -1
  40. package/dist/src/dbos-runtime/commands.js +9 -9
  41. package/dist/src/dbos-runtime/config.d.ts +7 -7
  42. package/dist/src/dbos-runtime/config.d.ts.map +1 -1
  43. package/dist/src/dbos-runtime/config.js +39 -23
  44. package/dist/src/dbos-runtime/config.js.map +1 -1
  45. package/dist/src/dbos-runtime/configure.d.ts.map +1 -1
  46. package/dist/src/dbos-runtime/configure.js.map +1 -1
  47. package/dist/src/dbos-runtime/db_connection.d.ts.map +1 -1
  48. package/dist/src/dbos-runtime/db_connection.js +2 -2
  49. package/dist/src/dbos-runtime/db_connection.js.map +1 -1
  50. package/dist/src/dbos-runtime/db_wizard.d.ts +1 -1
  51. package/dist/src/dbos-runtime/db_wizard.d.ts.map +1 -1
  52. package/dist/src/dbos-runtime/db_wizard.js +18 -18
  53. package/dist/src/dbos-runtime/db_wizard.js.map +1 -1
  54. package/dist/src/dbos-runtime/debug.d.ts +3 -3
  55. package/dist/src/dbos-runtime/debug.d.ts.map +1 -1
  56. package/dist/src/dbos-runtime/debug.js +7 -12
  57. package/dist/src/dbos-runtime/debug.js.map +1 -1
  58. package/dist/src/dbos-runtime/migrate.d.ts +2 -2
  59. package/dist/src/dbos-runtime/migrate.d.ts.map +1 -1
  60. package/dist/src/dbos-runtime/migrate.js +14 -10
  61. package/dist/src/dbos-runtime/migrate.js.map +1 -1
  62. package/dist/src/dbos-runtime/reset.d.ts +2 -2
  63. package/dist/src/dbos-runtime/reset.js +2 -2
  64. package/dist/src/dbos-runtime/reset.js.map +1 -1
  65. package/dist/src/dbos-runtime/runtime.d.ts.map +1 -1
  66. package/dist/src/dbos-runtime/runtime.js +4 -4
  67. package/dist/src/dbos-runtime/runtime.js.map +1 -1
  68. package/dist/src/dbos-runtime/workflow_management.d.ts +5 -4
  69. package/dist/src/dbos-runtime/workflow_management.d.ts.map +1 -1
  70. package/dist/src/dbos-runtime/workflow_management.js +34 -14
  71. package/dist/src/dbos-runtime/workflow_management.js.map +1 -1
  72. package/dist/src/dbos.d.ts +23 -23
  73. package/dist/src/dbos.d.ts.map +1 -1
  74. package/dist/src/dbos.js +59 -59
  75. package/dist/src/dbos.js.map +1 -1
  76. package/dist/src/debugpoint.d.ts.map +1 -1
  77. package/dist/src/debugpoint.js +4 -4
  78. package/dist/src/debugpoint.js.map +1 -1
  79. package/dist/src/decorators.d.ts +8 -8
  80. package/dist/src/decorators.d.ts.map +1 -1
  81. package/dist/src/decorators.js +36 -33
  82. package/dist/src/decorators.js.map +1 -1
  83. package/dist/src/error.d.ts.map +1 -1
  84. package/dist/src/error.js +6 -5
  85. package/dist/src/error.js.map +1 -1
  86. package/dist/src/eventreceiver.d.ts +1 -1
  87. package/dist/src/eventreceiver.d.ts.map +1 -1
  88. package/dist/src/httpServer/handler.d.ts +8 -8
  89. package/dist/src/httpServer/handler.d.ts.map +1 -1
  90. package/dist/src/httpServer/handler.js.map +1 -1
  91. package/dist/src/httpServer/handlerTypes.d.ts.map +1 -1
  92. package/dist/src/httpServer/handlerTypes.js.map +1 -1
  93. package/dist/src/httpServer/middleware.d.ts +9 -9
  94. package/dist/src/httpServer/middleware.d.ts.map +1 -1
  95. package/dist/src/httpServer/middleware.js +6 -6
  96. package/dist/src/httpServer/middleware.js.map +1 -1
  97. package/dist/src/httpServer/server.d.ts +2 -2
  98. package/dist/src/httpServer/server.d.ts.map +1 -1
  99. package/dist/src/httpServer/server.js +27 -33
  100. package/dist/src/httpServer/server.js.map +1 -1
  101. package/dist/src/index.d.ts +16 -16
  102. package/dist/src/index.d.ts.map +1 -1
  103. package/dist/src/index.js.map +1 -1
  104. package/dist/src/kafka/kafka.d.ts.map +1 -1
  105. package/dist/src/kafka/kafka.js +2 -2
  106. package/dist/src/kafka/kafka.js.map +1 -1
  107. package/dist/src/procedure.d.ts +6 -6
  108. package/dist/src/procedure.d.ts.map +1 -1
  109. package/dist/src/procedure.js.map +1 -1
  110. package/dist/src/scheduler/crontab.d.ts.map +1 -1
  111. package/dist/src/scheduler/crontab.js +54 -33
  112. package/dist/src/scheduler/crontab.js.map +1 -1
  113. package/dist/src/scheduler/scheduler.d.ts +3 -3
  114. package/dist/src/scheduler/scheduler.d.ts.map +1 -1
  115. package/dist/src/scheduler/scheduler.js +7 -7
  116. package/dist/src/scheduler/scheduler.js.map +1 -1
  117. package/dist/src/step.d.ts +4 -4
  118. package/dist/src/step.d.ts.map +1 -1
  119. package/dist/src/step.js.map +1 -1
  120. package/dist/src/system_database.d.ts +20 -13
  121. package/dist/src/system_database.d.ts.map +1 -1
  122. package/dist/src/system_database.js +168 -60
  123. package/dist/src/system_database.js.map +1 -1
  124. package/dist/src/telemetry/collector.d.ts +3 -3
  125. package/dist/src/telemetry/exporters.d.ts +1 -1
  126. package/dist/src/telemetry/exporters.js.map +1 -1
  127. package/dist/src/telemetry/index.d.ts +5 -5
  128. package/dist/src/telemetry/logs.d.ts +4 -4
  129. package/dist/src/telemetry/logs.d.ts.map +1 -1
  130. package/dist/src/telemetry/logs.js +18 -18
  131. package/dist/src/telemetry/logs.js.map +1 -1
  132. package/dist/src/telemetry/traces.d.ts +3 -3
  133. package/dist/src/telemetry/traces.js +7 -7
  134. package/dist/src/testing/testing_runtime.d.ts +11 -11
  135. package/dist/src/testing/testing_runtime.d.ts.map +1 -1
  136. package/dist/src/testing/testing_runtime.js +15 -8
  137. package/dist/src/testing/testing_runtime.js.map +1 -1
  138. package/dist/src/transaction.d.ts +6 -6
  139. package/dist/src/transaction.d.ts.map +1 -1
  140. package/dist/src/transaction.js +4 -4
  141. package/dist/src/transaction.js.map +1 -1
  142. package/dist/src/user_database.d.ts +4 -4
  143. package/dist/src/user_database.d.ts.map +1 -1
  144. package/dist/src/user_database.js +45 -45
  145. package/dist/src/user_database.js.map +1 -1
  146. package/dist/src/utils.d.ts.map +1 -1
  147. package/dist/src/utils.js +6 -12
  148. package/dist/src/utils.js.map +1 -1
  149. package/dist/src/wfqueue.d.ts +1 -1
  150. package/dist/src/wfqueue.d.ts.map +1 -1
  151. package/dist/src/wfqueue.js +8 -8
  152. package/dist/src/wfqueue.js.map +1 -1
  153. package/dist/src/workflow.d.ts +22 -10
  154. package/dist/src/workflow.d.ts.map +1 -1
  155. package/dist/src/workflow.js +19 -18
  156. package/dist/src/workflow.js.map +1 -1
  157. package/dist/tsconfig.build.tsbuildinfo +1 -1
  158. package/eslint.config.js +29 -27
  159. package/migrations/20240123182943_schema.js +7 -8
  160. package/migrations/20240123183021_tables.js +52 -48
  161. package/migrations/20240123183025_indexes.js +11 -14
  162. package/migrations/20240123183030_triggers.js +7 -8
  163. package/migrations/20240124015239_status_timestamp.js +12 -18
  164. package/migrations/20240201213211_replica_identity.js +8 -11
  165. package/migrations/20240205223925_foreign_keys.js +40 -18
  166. package/migrations/20240207192338_executor_id_index.js +8 -10
  167. package/migrations/20240430090000_tables.js +8 -10
  168. package/migrations/20240516004341_application_version.js +10 -12
  169. package/migrations/20240517000000_status_class_config.js +10 -14
  170. package/migrations/20240621000000_workflow_tries.js +8 -12
  171. package/migrations/20240924000000_workflowqueue.js +32 -23
  172. package/migrations/20241009150000_event_dispatch_kv.js +12 -14
  173. package/migrations/20252101000000_workflow_queues_executor_id.js +7 -9
  174. package/package.json +9 -2
  175. package/src/dbos-runtime/cloudutils/README.md +1 -1
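The detailed hunks below are from package/dist/src/dbos-executor.js.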
@@ -29,17 +29,17 @@ const wfqueue_1 = require("./wfqueue");
  const debugpoint_1 = require("./debugpoint");
  exports.dbosNull = {};
  exports.OperationType = {
- HANDLER: "handler",
- WORKFLOW: "workflow",
- TRANSACTION: "transaction",
- COMMUNICATOR: "communicator",
- PROCEDURE: "procedure",
+ HANDLER: 'handler',
+ WORKFLOW: 'workflow',
+ TRANSACTION: 'transaction',
+ COMMUNICATOR: 'communicator',
+ PROCEDURE: 'procedure',
  };
  const TempWorkflowType = {
- transaction: "transaction",
- procedure: "procedure",
- external: "external",
- send: "send",
+ transaction: 'transaction',
+ procedure: 'procedure',
+ external: 'external',
+ send: 'send',
  };
  class DBOSExecutor {
  config;
@@ -50,14 +50,14 @@ class DBOSExecutor {
  systemDatabase;
  procedurePool;
  // Temporary workflows are created by calling transaction/send/recv directly from the executor class
- static tempWorkflowName = "temp_workflow";
+ static tempWorkflowName = 'temp_workflow';
  workflowInfoMap = new Map([
  // We initialize the map with an entry for temporary workflows.
  [
  DBOSExecutor.tempWorkflowName,
  {
  workflow: async () => {
- this.logger.error("UNREACHABLE: Indirect invoke of temp workflow");
+ this.logger.error('UNREACHABLE: Indirect invoke of temp workflow');
  return Promise.resolve();
  },
  config: {},
@@ -76,8 +76,7 @@ class DBOSExecutor {
  isFlushingBuffers = false;
  static defaultNotificationTimeoutSec = 60;
  debugMode;
- debugProxy;
- static systemDBSchemaName = "dbos";
+ static systemDBSchemaName = 'dbos';
  logger;
  tracer;
  // eslint-disable-next-line @typescript-eslint/ban-types
@@ -86,17 +85,16 @@ class DBOSExecutor {
  eventReceivers = [];
  scheduler = undefined;
  wfqEnded = undefined;
- executorID = process.env.DBOS__VMID || "local";
+ executorID = process.env.DBOS__VMID || 'local';
  static globalInstance = undefined;
  /* WORKFLOW EXECUTOR LIFE CYCLE MANAGEMENT */
  constructor(config, systemDatabase) {
  this.config = config;
  this.debugMode = config.debugMode ?? false;
- this.debugProxy = config.debugProxy;
  // Set configured environment variables
  if (config.env) {
  for (const [key, value] of Object.entries(config.env)) {
- if (typeof value === "string") {
+ if (typeof value === 'string') {
  process.env[key] = value;
  }
  else {
@@ -115,27 +113,15 @@ class DBOSExecutor {
  this.logger = new logs_1.GlobalLogger(this.telemetryCollector, this.config.telemetry?.logs);
  this.tracer = new traces_1.Tracer(this.telemetryCollector);
  if (this.debugMode) {
- this.logger.info("Running in debug mode!");
- if (this.debugProxy) {
- try {
- const url = new URL(this.config.debugProxy);
- this.config.poolConfig.host = url.hostname;
- this.config.poolConfig.port = parseInt(url.port, 10);
- this.logger.info(`Debugging mode proxy: ${this.config.poolConfig.host}:${this.config.poolConfig.port}`);
- }
- catch (err) {
- this.logger.error(err);
- throw err;
- }
- }
+ this.logger.info('Running in debug mode!');
  }
  this.procedurePool = new pg_1.Pool(this.config.poolConfig);
  if (systemDatabase) {
- this.logger.debug("Using provided system database"); // XXX print the name or something
+ this.logger.debug('Using provided system database'); // XXX print the name or something
  this.systemDatabase = systemDatabase;
  }
  else {
- this.logger.debug("Using Postgres system database");
+ this.logger.debug('Using Postgres system database');
  this.systemDatabase = new system_database_1.PostgresSystemDatabase(this.config.poolConfig, this.config.system_database, this.logger);
  }
  this.flushBufferID = setInterval(() => {
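Note: config.debugProxy support was removed along with the debugProxy field above, so debug mode now only logs that it is active instead of repointing the connection pool at a proxy. For reference, a cleaned-up TypeScript sketch of the removed behavior (the example URL is an assumption):

    // Removed behavior (sketch): redirect the user-database pool at a debug proxy URL.
    function applyDebugProxy(poolConfig: { host?: string; port?: number }, debugProxy: string): void {
      const url = new URL(debugProxy); // e.g. 'postgres://localhost:2345' (assumed example)
      poolConfig.host = url.hostname;
      poolConfig.port = parseInt(url.port, 10);
    }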
@@ -144,7 +130,7 @@ class DBOSExecutor {
  void this.flushWorkflowBuffers();
  }
  }, this.flushBufferIntervalMs);
- this.logger.debug("Started workflow status buffer worker");
+ this.logger.debug('Started workflow status buffer worker');
  this.initialized = false;
  DBOSExecutor.globalInstance = this;
  }
@@ -154,25 +140,26 @@ class DBOSExecutor {
  if (userDbClient === user_database_1.UserDatabaseName.PRISMA) {
  // TODO: make Prisma work with debugger proxy.
  // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-require-imports
- const { PrismaClient } = require(node_path_1.default.join(process.cwd(), "node_modules", "@prisma", "client")); // Find the prisma client in the node_modules of the current project
+ const { PrismaClient } = require(node_path_1.default.join(process.cwd(), 'node_modules', '@prisma', 'client')); // Find the prisma client in the node_modules of the current project
+ this.userDatabase = new user_database_1.PrismaUserDatabase(
  // eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-unsafe-call
- this.userDatabase = new user_database_1.PrismaUserDatabase(new PrismaClient({
+ new PrismaClient({
  datasources: {
  db: {
  url: `postgresql://${userDBConfig.user}:${userDBConfig.password}@${userDBConfig.host}:${userDBConfig.port}/${userDBConfig.database}`,
  },
- }
+ },
  }));
- this.logger.debug("Loaded Prisma user database");
+ this.logger.debug('Loaded Prisma user database');
  }
  else if (userDbClient === user_database_1.UserDatabaseName.TYPEORM) {
  // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-require-imports
- const DataSourceExports = require("typeorm");
+ const DataSourceExports = require('typeorm');
  try {
  this.userDatabase = new user_database_1.TypeORMDatabase(
  // eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access
  new DataSourceExports.DataSource({
- type: "postgres", // perhaps should move to config file
+ type: 'postgres', // perhaps should move to config file
  host: userDBConfig.host,
  port: userDBConfig.port,
  username: userDBConfig.user,
@@ -186,11 +173,11 @@ class DBOSExecutor {
  s.message = `Error loading TypeORM user database: ${s.message}`;
  this.logger.error(s);
  }
- this.logger.debug("Loaded TypeORM user database");
+ this.logger.debug('Loaded TypeORM user database');
  }
  else if (userDbClient === user_database_1.UserDatabaseName.KNEX) {
  const knexConfig = {
- client: "postgres",
+ client: 'postgres',
  connection: {
  host: userDBConfig.host,
  port: userDBConfig.port,
@@ -201,21 +188,21 @@ class DBOSExecutor {
  },
  };
  this.userDatabase = new user_database_1.KnexUserDatabase((0, knex_1.default)(knexConfig));
- this.logger.debug("Loaded Knex user database");
+ this.logger.debug('Loaded Knex user database');
  }
  else if (userDbClient === user_database_1.UserDatabaseName.DRIZZLE) {
  // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-require-imports
- const DrizzleExports = require("drizzle-orm/node-postgres");
+ const DrizzleExports = require('drizzle-orm/node-postgres');
  const drizzlePool = new pg_1.Pool(userDBConfig);
  // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access
  const drizzle = DrizzleExports.drizzle(drizzlePool, { schema: this.drizzleEntities });
  // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
  this.userDatabase = new user_database_1.DrizzleUserDatabase(drizzlePool, drizzle);
- this.logger.debug("Loaded Drizzle user database");
+ this.logger.debug('Loaded Drizzle user database');
  }
  else {
  this.userDatabase = new user_database_1.PGNodeUserDatabase(userDBConfig);
- this.logger.debug("Loaded Postgres user database");
+ this.logger.debug('Loaded Postgres user database');
  }
  }
  #registerClass(cls) {
@@ -253,7 +240,7 @@ class DBOSExecutor {
  }
  async init(classes) {
  if (this.initialized) {
- this.logger.error("Workflow executor already initialized!");
+ this.logger.error('Workflow executor already initialized!');
  return;
  }
  if (!classes || !classes.length) {
@@ -267,7 +254,7 @@ class DBOSExecutor {
  * With TSORM, we take an array of entities (Function[]) and add them to this.entities:
  */
  if (Array.isArray(reg.ormEntities)) {
- this.typeormEntities = (this.typeormEntities).concat(reg.ormEntities);
+ this.typeormEntities = this.typeormEntities.concat(reg.ormEntities);
  length = reg.ormEntities.length;
  }
  else {
@@ -279,11 +266,13 @@ class DBOSExecutor {
  }
  this.logger.debug(`Loaded ${length} ORM entities`);
  }
- await (0, user_database_1.createDBIfDoesNotExist)(this.config.poolConfig, this.logger);
+ if (!this.debugMode) {
+ await (0, user_database_1.createDBIfDoesNotExist)(this.config.poolConfig, this.logger);
+ }
  this.configureDbClient();
  if (!this.userDatabase) {
- this.logger.error("No user database configured!");
- throw new error_1.DBOSInitializationError("No user database configured!");
+ this.logger.error('No user database configured!');
+ throw new error_1.DBOSInitializationError('No user database configured!');
  }
  for (const cls of classes) {
  this.#registerClass(cls);
@@ -324,30 +313,30 @@ class DBOSExecutor {
  for (const v of this.registeredOperations) {
  const m = v;
  if (m.init === true) {
- this.logger.debug("Executing init method: " + m.name);
+ this.logger.debug('Executing init method: ' + m.name);
  await m.origFunction(new context_1.InitContext(this));
  }
  }
  await this.recoverPendingWorkflows();
  }
- this.logger.info("Workflow executor initialized");
+ this.logger.info('Workflow executor initialized');
  }
  #logNotice(msg) {
  switch (msg.severity) {
- case "INFO":
- case "LOG":
- case "NOTICE":
+ case 'INFO':
+ case 'LOG':
+ case 'NOTICE':
  this.logger.info(msg.message);
  break;
- case "WARNING":
+ case 'WARNING':
  this.logger.warn(msg.message);
  break;
- case "DEBUG":
+ case 'DEBUG':
  this.logger.debug(msg.message);
  break;
- case "ERROR":
- case "FATAL":
- case "PANIC":
+ case 'ERROR':
+ case 'FATAL':
+ case 'PANIC':
  this.logger.error(msg.message);
  break;
  default:
@@ -356,7 +345,7 @@ class DBOSExecutor {
  }
  async destroy() {
  if (this.pendingWorkflowMap.size > 0) {
- this.logger.info("Waiting for pending workflows to finish.");
+ this.logger.info('Waiting for pending workflows to finish.');
  await Promise.allSettled(this.pendingWorkflowMap.values());
  }
  clearInterval(this.flushBufferID);
@@ -365,7 +354,7 @@ class DBOSExecutor {
  await this.flushWorkflowBuffers();
  }
  while (this.isFlushingBuffers) {
- this.logger.info("Waiting for result buffers to be exported.");
+ this.logger.info('Waiting for result buffers to be exported.');
  await (0, utils_1.sleepms)(1000);
  }
  await this.systemDatabase.destroy();
@@ -439,9 +428,7 @@ class DBOSExecutor {
  this.logger.debug(`Registered stored proc ${cfn}`);
  }
  getWorkflowInfo(wf) {
- const wfname = (wf.name === DBOSExecutor.tempWorkflowName)
- ? wf.name
- : (0, decorators_1.getRegisteredMethodClassName)(wf) + '.' + wf.name;
+ const wfname = wf.name === DBOSExecutor.tempWorkflowName ? wf.name : (0, decorators_1.getRegisteredMethodClassName)(wf) + '.' + wf.name;
  return this.workflowInfoMap.get(wfname);
  }
  getWorkflowInfoByStatus(wf) {
@@ -498,14 +485,14 @@ class DBOSExecutor {
  const wCtxt = new workflow_1.WorkflowContextImpl(this, params.parentCtx, workflowUUID, wConfig, wf.name, presetUUID, params.tempWfType, params.tempWfName);
  const internalStatus = {
  workflowUUID: workflowUUID,
- status: (params.queueName !== undefined) ? workflow_1.StatusString.ENQUEUED : workflow_1.StatusString.PENDING,
+ status: params.queueName !== undefined ? workflow_1.StatusString.ENQUEUED : workflow_1.StatusString.PENDING,
  name: wf.name,
- className: wCtxt.isTempWorkflow ? "" : (0, decorators_1.getRegisteredMethodClassName)(wf),
- configName: params.configuredInstance?.name || "",
+ className: wCtxt.isTempWorkflow ? '' : (0, decorators_1.getRegisteredMethodClassName)(wf),
+ configName: params.configuredInstance?.name || '',
  queueName: params.queueName,
  authenticatedUser: wCtxt.authenticatedUser,
  output: undefined,
- error: "",
+ error: '',
  assumedRole: wCtxt.assumedRole,
  authenticatedRoles: wCtxt.authenticatedRoles,
  request: wCtxt.request,
@@ -514,18 +501,17 @@ class DBOSExecutor {
  applicationID: wCtxt.applicationID,
  createdAt: Date.now(), // Remember the start time of this workflow
  maxRetries: wCtxt.maxRecoveryAttempts,
- recovery: params.recovery === true,
  };
  if (wCtxt.isTempWorkflow) {
  internalStatus.name = `${DBOSExecutor.tempWorkflowName}-${wCtxt.tempWfOperationType}-${wCtxt.tempWfOperationName}`;
- internalStatus.className = params.tempWfClass ?? "";
+ internalStatus.className = params.tempWfClass ?? '';
  }
  let status = undefined;
  // Synchronously set the workflow's status to PENDING and record workflow inputs (for non single-transaction workflows).
  // We have to do it for all types of workflows because operation_outputs table has a foreign key constraint on workflow status table.
- if ((wCtxt.tempWfOperationType !== TempWorkflowType.transaction
- && wCtxt.tempWfOperationType !== TempWorkflowType.procedure)
- || params.queueName !== undefined) {
+ if ((wCtxt.tempWfOperationType !== TempWorkflowType.transaction &&
+ wCtxt.tempWfOperationType !== TempWorkflowType.procedure) ||
+ params.queueName !== undefined) {
  if (this.debugMode) {
  const wfStatus = await this.systemDatabase.getWorkflowStatus(workflowUUID);
  const wfInputs = await this.systemDatabase.getWorkflowInputs(workflowUUID);
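Note the behavioral change above: a workflow started with params.queueName now begins as ENQUEUED rather than PENDING, and the recovery flag was dropped from the internal status record. The status choice in isolation (TypeScript, illustration only; the literal status strings are assumed to match StatusString):

    // Illustration only: how the initial workflow status is chosen.
    type InitialStatus = 'ENQUEUED' | 'PENDING';
    function initialStatus(queueName?: string): InitialStatus {
      // A workflow submitted to a queue waits as ENQUEUED; a directly started workflow is PENDING.
      return queueName !== undefined ? 'ENQUEUED' : 'PENDING';
    }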
@@ -590,7 +576,7 @@ class DBOSExecutor {
  // Retrieve the handle and wait for the result.
  const retrievedHandle = this.retrieveWorkflow(workflowUUID);
  result = await retrievedHandle.getResult();
- wCtxt.span.setAttribute("cached", true);
+ wCtxt.span.setAttribute('cached', true);
  wCtxt.span.setStatus({ code: api_1.SpanStatusCode.OK });
  }
  else {
@@ -616,8 +602,8 @@ class DBOSExecutor {
  }
  finally {
  this.tracer.endSpan(wCtxt.span);
- if (wCtxt.tempWfOperationType === TempWorkflowType.transaction
- || wCtxt.tempWfOperationType === TempWorkflowType.procedure) {
+ if (wCtxt.tempWfOperationType === TempWorkflowType.transaction ||
+ wCtxt.tempWfOperationType === TempWorkflowType.procedure) {
  // For single-transaction workflows, asynchronously record inputs.
  // We must buffer inputs after workflow status is buffered/flushed because workflow_inputs table has a foreign key reference to the workflow_status table.
  if (!this.debugMode) {
@@ -631,12 +617,13 @@ class DBOSExecutor {
  }
  return result;
  };
- if (this.debugMode || (status !== 'SUCCESS' && status !== 'ERROR' && (params.queueName === undefined || params.executeWorkflow))) {
+ if (this.debugMode ||
+ (status !== 'SUCCESS' && status !== 'ERROR' && (params.queueName === undefined || params.executeWorkflow))) {
  const workflowPromise = runWorkflow();
  // Need to await for the workflow and capture errors.
  const awaitWorkflowPromise = workflowPromise
  .catch((error) => {
- this.logger.debug("Captured error in awaitWorkflowPromise: " + error);
+ this.logger.debug('Captured error in awaitWorkflowPromise: ' + error);
  })
  .finally(() => {
  // Remove itself from pending workflow map.
@@ -663,7 +650,7 @@ class DBOSExecutor {
  * Retrieve the transaction snapshot information of the current transaction
  */
  static async #retrieveSnapshot(query) {
- const rows = await query("SELECT pg_current_snapshot()::text as txn_snapshot;", []);
+ const rows = await query('SELECT pg_current_snapshot()::text as txn_snapshot;', []);
  return rows[0].txn_snapshot;
  }
  /**
@@ -675,14 +662,14 @@ class DBOSExecutor {
  */
  async #checkExecution(query, workflowUUID, funcID) {
  // Note: we read the current snapshot, not the recorded one!
- const rows = await query("(SELECT output, error, txn_snapshot, true as recorded FROM dbos.transaction_outputs WHERE workflow_uuid=$1 AND function_id=$2 UNION ALL SELECT null as output, null as error, pg_current_snapshot()::text as txn_snapshot, false as recorded) ORDER BY recorded", [workflowUUID, funcID]);
+ const rows = await query('(SELECT output, error, txn_snapshot, true as recorded FROM dbos.transaction_outputs WHERE workflow_uuid=$1 AND function_id=$2 UNION ALL SELECT null as output, null as error, pg_current_snapshot()::text as txn_snapshot, false as recorded) ORDER BY recorded', [workflowUUID, funcID]);
  if (rows.length === 0 || rows.length > 2) {
- this.logger.error("Unexpected! This should never happen. Returned rows: " + rows.toString());
- throw new error_1.DBOSError("This should never happen. Returned rows: " + rows.toString());
+ this.logger.error('Unexpected! This should never happen. Returned rows: ' + rows.toString());
+ throw new error_1.DBOSError('This should never happen. Returned rows: ' + rows.toString());
  }
  const res = {
  output: exports.dbosNull,
- txn_snapshot: ""
+ txn_snapshot: '',
  };
  // recorded=false row will be first because we used ORDER BY.
  res.txn_snapshot = rows[0].txn_snapshot;
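Note: the query above folds two lookups into one round trip. It UNION ALLs the recorded output row (recorded = true) with a synthetic row carrying the current snapshot (recorded = false), then orders by recorded so the snapshot row always comes first. An interpretation sketch (TypeScript; the row shape is inferred from the SQL):

    interface CheckRow {
      output: string | null;
      error: string | null;
      txn_snapshot: string;
      recorded: boolean;
    }
    function interpretCheck(rows: CheckRow[]) {
      // rows[0] is the synthetic recorded=false row: it always carries the *current* snapshot.
      const currentSnapshot = rows[0].txn_snapshot;
      // rows[1], if present, is the previously recorded output/error for this (workflow, function) pair.
      const prior = rows.length === 2 ? rows[1] : undefined;
      return { currentSnapshot, prior };
    }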
@@ -701,11 +688,11 @@ class DBOSExecutor {
  */
  async #recordOutput(query, workflowUUID, funcID, txnSnapshot, output, isKeyConflict) {
  if (this.debugMode) {
- throw new error_1.DBOSDebuggerError("Cannot record output in debug mode.");
+ throw new error_1.DBOSDebuggerError('Cannot record output in debug mode.');
  }
  try {
  const serialOutput = utils_1.DBOSJSON.stringify(output);
- const rows = await query("INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, output, txn_id, txn_snapshot, created_at) VALUES ($1, $2, $3, (select pg_current_xact_id_if_assigned()::text), $4, $5) RETURNING txn_id;", [workflowUUID, funcID, serialOutput, txnSnapshot, Date.now()]);
+ const rows = await query('INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, output, txn_id, txn_snapshot, created_at) VALUES ($1, $2, $3, (select pg_current_xact_id_if_assigned()::text), $4, $5) RETURNING txn_id;', [workflowUUID, funcID, serialOutput, txnSnapshot, Date.now()]);
  return rows[0].txn_id;
  }
  catch (error) {
@@ -723,11 +710,11 @@ class DBOSExecutor {
  */
  async #recordError(query, workflowUUID, funcID, txnSnapshot, err, isKeyConflict) {
  if (this.debugMode) {
- throw new error_1.DBOSDebuggerError("Cannot record error in debug mode.");
+ throw new error_1.DBOSDebuggerError('Cannot record error in debug mode.');
  }
  try {
  const serialErr = utils_1.DBOSJSON.stringify((0, serialize_error_1.serializeError)(err));
- await query("INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, error, txn_id, txn_snapshot, created_at) VALUES ($1, $2, $3, null, $4, $5) RETURNING txn_id;", [workflowUUID, funcID, serialErr, txnSnapshot, Date.now()]);
+ await query('INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, error, txn_id, txn_snapshot, created_at) VALUES ($1, $2, $3, null, $4, $5) RETURNING txn_id;', [workflowUUID, funcID, serialErr, txnSnapshot, Date.now()]);
  }
  catch (error) {
  if (isKeyConflict(error)) {
@@ -749,11 +736,11 @@ class DBOSExecutor {
  return;
  }
  if (this.debugMode) {
- throw new error_1.DBOSDebuggerError("Cannot flush result buffer in debug mode.");
+ throw new error_1.DBOSDebuggerError('Cannot flush result buffer in debug mode.');
  }
  funcIDs.sort();
  try {
- let sqlStmt = "INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, output, error, txn_id, txn_snapshot, created_at) VALUES ";
+ let sqlStmt = 'INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, output, error, txn_id, txn_snapshot, created_at) VALUES ';
  let paramCnt = 1;
  const values = [];
  for (const funcID of funcIDs) {
@@ -764,7 +751,7 @@ class DBOSExecutor {
  const txnSnapshot = recorded.txn_snapshot;
  const createdAt = recorded.created_at;
  if (paramCnt > 1) {
- sqlStmt += ", ";
+ sqlStmt += ', ';
  }
  sqlStmt += `($${paramCnt++}, $${paramCnt++}, $${paramCnt++}, $${paramCnt++}, null, $${paramCnt++}, $${paramCnt++})`;
  values.push(workflowUUID, funcID, utils_1.DBOSJSON.stringify(output), utils_1.DBOSJSON.stringify(null), txnSnapshot, createdAt);
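Note: buffered results are flushed with one parameterized multi-row INSERT, assembled exactly as above with six placeholders per row and txn_id fixed to null. A standalone sketch of that assembly (TypeScript; the helper name and row type are assumptions):

    interface BufferedResult {
      workflowUUID: string;
      funcID: number;
      output: string;
      error: string;
      txnSnapshot: string;
      createdAt: number;
    }
    function buildFlushStatement(rows: BufferedResult[]): { sql: string; values: unknown[] } {
      let sql = 'INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, output, error, txn_id, txn_snapshot, created_at) VALUES ';
      const values: unknown[] = [];
      let p = 1;
      rows.forEach((r, i) => {
        if (i > 0) sql += ', ';
        sql += `($${p++}, $${p++}, $${p++}, $${p++}, null, $${p++}, $${p++})`;
        values.push(r.workflowUUID, r.funcID, r.output, r.error, r.txnSnapshot, r.createdAt);
      });
      return { sql, values };
    }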
@@ -787,7 +774,7 @@ class DBOSExecutor {
  return this.#flushResultBuffer(func, resultBuffer, workflowUUID, (error) => this.userDatabase.isKeyConflictError(error));
  }
  #flushResultBufferProc(client, resultBuffer, workflowUUID) {
- const func = (sql, args) => client.query(sql, args).then(v => v.rows);
+ const func = (sql, args) => client.query(sql, args).then((v) => v.rows);
  return this.#flushResultBuffer(func, resultBuffer, workflowUUID, user_database_1.pgNodeIsKeyConflictError);
  }
  async transaction(txn, params, ...args) {
@@ -823,7 +810,7 @@ class DBOSExecutor {
  isolationLevel: txnInfo.config.isolationLevel,
  }, wfCtx.span);
  while (true) {
- let txn_snapshot = "invalid";
+ let txn_snapshot = 'invalid';
  const workflowUUID = wfCtx.workflowUUID;
  const wrappedTransaction = async (client) => {
  const tCtxt = new transaction_1.TransactionContextImpl(this.userDatabase.getName(), client, wfCtx, span, this.logger, funcId, txn.name);
@@ -834,7 +821,7 @@ class DBOSExecutor {
  const check = await this.#checkExecution(func, workflowUUID, funcId);
  txn_snapshot = check.txn_snapshot;
  if (check.output !== exports.dbosNull) {
- tCtxt.span.setAttribute("cached", true);
+ tCtxt.span.setAttribute('cached', true);
  tCtxt.span.setStatus({ code: api_1.SpanStatusCode.OK });
  this.tracer.endSpan(tCtxt.span);
  return check.output;
@@ -881,7 +868,7 @@ class DBOSExecutor {
  // Synchronously record the output of write transactions and obtain the transaction ID.
  const func = (sql, args) => this.userDatabase.queryWithClient(client, sql, ...args);
  const pg_txn_id = await this.#recordOutput(func, wfCtx.workflowUUID, funcId, txn_snapshot, result, (error) => this.userDatabase.isKeyConflictError(error));
- tCtxt.span.setAttribute("pg_txn_id", pg_txn_id);
+ tCtxt.span.setAttribute('pg_txn_id', pg_txn_id);
  wfCtx.resultBuffer.clear();
  }
  catch (error) {
@@ -908,7 +895,7 @@ class DBOSExecutor {
  }
  if (this.userDatabase.isRetriableTransactionError(err)) {
  // serialization_failure in PostgreSQL
- span.addEvent("TXN SERIALIZATION FAILURE", { "retryWaitMillis": retryWaitMillis }, performance.now());
+ span.addEvent('TXN SERIALIZATION FAILURE', { retryWaitMillis: retryWaitMillis }, performance.now());
  // Retry serialization failures.
  await (0, utils_1.sleepms)(retryWaitMillis);
  retryWaitMillis *= backoffFactor;
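Note: retriable errors (PostgreSQL serialization failures) are retried with exponential backoff: sleep retryWaitMillis, multiply by backoffFactor, capped at maxRetryWaitMs (2 seconds in the stored-procedure path below). A generic sketch of that policy (TypeScript; the starting wait and multiplier are assumptions, the real values are set earlier in each function):

    async function retrySerializationFailures<T>(
      run: () => Promise<T>,
      isRetriable: (err: unknown) => boolean,
    ): Promise<T> {
      let retryWaitMillis = 10;    // assumed starting wait
      const backoffFactor = 1.5;   // assumed multiplier
      const maxRetryWaitMs = 2000; // matches the "Maximum wait 2 seconds" cap below
      for (;;) {
        try {
          return await run();
        } catch (err) {
          if (!isRetriable(err)) throw err; // only serialization failures are retried
          await new Promise((resolve) => setTimeout(resolve, retryWaitMillis));
          retryWaitMillis = Math.min(retryWaitMillis * backoffFactor, maxRetryWaitMs);
        }
      }
    }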
@@ -981,15 +968,15 @@ class DBOSExecutor {
  const maxRetryWaitMs = 2000; // Maximum wait 2 seconds.
  const readOnly = procInfo.config.readOnly ?? false;
  while (true) {
- let txn_snapshot = "invalid";
+ let txn_snapshot = 'invalid';
  const wrappedProcedure = async (client) => {
  const ctxt = new procedure_1.StoredProcedureContextImpl(client, wfCtx, span, this.logger, funcId, proc.name);
  if (wfCtx.presetUUID) {
- const func = (sql, args) => this.procedurePool.query(sql, args).then(v => v.rows);
+ const func = (sql, args) => this.procedurePool.query(sql, args).then((v) => v.rows);
  const check = await this.#checkExecution(func, wfCtx.workflowUUID, funcId);
  txn_snapshot = check.txn_snapshot;
  if (check.output !== exports.dbosNull) {
- ctxt.span.setAttribute("cached", true);
+ ctxt.span.setAttribute('cached', true);
  ctxt.span.setStatus({ code: api_1.SpanStatusCode.OK });
  this.tracer.endSpan(ctxt.span);
  return check.output;
@@ -997,7 +984,7 @@ class DBOSExecutor {
  }
  else {
  // Collect snapshot information for read-only transactions and non-preset UUID transactions, if not already collected above
- const func = (sql, args) => this.procedurePool.query(sql, args).then(v => v.rows);
+ const func = (sql, args) => this.procedurePool.query(sql, args).then((v) => v.rows);
  txn_snapshot = await DBOSExecutor.#retrieveSnapshot(func);
  }
  if (this.debugMode) {
@@ -1031,23 +1018,25 @@ class DBOSExecutor {
  }
  else {
  // Synchronously record the output of write transactions and obtain the transaction ID.
- const func = (sql, args) => client.query(sql, args).then(v => v.rows);
+ const func = (sql, args) => client.query(sql, args).then((v) => v.rows);
  const pg_txn_id = await this.#recordOutput(func, wfCtx.workflowUUID, funcId, txn_snapshot, result, user_database_1.pgNodeIsKeyConflictError);
  // const pg_txn_id = await wfCtx.recordOutputProc<R>(client, funcId, txn_snapshot, result);
- ctxt.span.setAttribute("pg_txn_id", pg_txn_id);
+ ctxt.span.setAttribute('pg_txn_id', pg_txn_id);
  wfCtx.resultBuffer.clear();
  }
  return result;
  };
  try {
- const result = await this.invokeStoredProcFunction(wrappedProcedure, { isolationLevel: procInfo.config.isolationLevel });
+ const result = await this.invokeStoredProcFunction(wrappedProcedure, {
+ isolationLevel: procInfo.config.isolationLevel,
+ });
  span.setStatus({ code: api_1.SpanStatusCode.OK });
  return result;
  }
  catch (err) {
  if (this.userDatabase.isRetriableTransactionError(err)) {
  // serialization_failure in PostgreSQL
- span.addEvent("TXN SERIALIZATION FAILURE", { "retryWaitMillis": retryWaitMillis }, performance.now());
+ span.addEvent('TXN SERIALIZATION FAILURE', { retryWaitMillis: retryWaitMillis }, performance.now());
  // Retry serialization failures.
  await (0, utils_1.sleepms)(retryWaitMillis);
  retryWaitMillis *= backoffFactor;
@@ -1058,7 +1047,7 @@ class DBOSExecutor {
  const e = err;
  await this.invokeStoredProcFunction(async (client) => {
  await this.#flushResultBufferProc(client, wfCtx.resultBuffer, wfCtx.workflowUUID);
- const func = (sql, args) => client.query(sql, args).then(v => v.rows);
+ const func = (sql, args) => client.query(sql, args).then((v) => v.rows);
  await this.#recordError(func, wfCtx.workflowUUID, funcId, txn_snapshot, e, user_database_1.pgNodeIsKeyConflictError);
  }, { isolationLevel: transaction_1.IsolationLevel.ReadCommitted });
  await this.userDatabase.transaction(async (client) => {
@@ -1118,7 +1107,7 @@ class DBOSExecutor {
  wfCtx.resultBuffer.clear();
  }
  if (txn_id) {
- span.setAttribute("pg_txn_id", txn_id);
+ span.setAttribute('pg_txn_id', txn_id);
  }
  span.setStatus({ code: api_1.SpanStatusCode.OK });
  return output;
@@ -1128,13 +1117,11 @@ class DBOSExecutor {
  const log = (msg) => this.#logNotice(msg);
  const procClassName = this.getProcedureClassName(proc);
  const plainProcName = `${procClassName}_${proc.name}_p`;
- const procName = this.config.appVersion
- ? `v${this.config.appVersion}_${plainProcName}`
- : plainProcName;
+ const procName = this.config.appVersion ? `v${this.config.appVersion}_${plainProcName}` : plainProcName;
  const sql = `CALL "${procName}"(${args.map((_v, i) => `$${i + 1}`).join()});`;
  try {
  client.on('notice', log);
- return await client.query(sql, args).then(value => value.rows);
+ return await client.query(sql, args).then((value) => value.rows);
  }
  finally {
  client.off('notice', log);
@@ -1206,7 +1193,7 @@ class DBOSExecutor {
  // Check if this execution previously happened, returning its original result if it did.
  const check = await this.systemDatabase.checkOperationOutput(wfCtx.workflowUUID, ctxt.functionID);
  if (check !== exports.dbosNull) {
- ctxt.span.setAttribute("cached", true);
+ ctxt.span.setAttribute('cached', true);
  ctxt.span.setStatus({ code: api_1.SpanStatusCode.OK });
  this.tracer.endSpan(ctxt.span);
  return check;
@@ -1245,7 +1232,7 @@ class DBOSExecutor {
  const e = error;
  errors.push(e);
  this.logger.warn(`Error in step being automatically retried. Attempt ${numAttempts} of ${ctxt.maxAttempts}. ${e.stack}`);
- span.addEvent(`Step attempt ${numAttempts + 1} failed`, { "retryIntervalSeconds": intervalSeconds, "error": error.message }, performance.now());
+ span.addEvent(`Step attempt ${numAttempts + 1} failed`, { retryIntervalSeconds: intervalSeconds, error: error.message }, performance.now());
  if (numAttempts < ctxt.maxAttempts) {
  // Sleep for an interval, then increase the interval by backoffRate.
  // Cap at the maximum allowed retry interval.
@@ -1300,7 +1287,9 @@ class DBOSExecutor {
  };
  const workflowUUID = idempotencyKey ? destinationUUID + idempotencyKey : undefined;
  return (await this.workflow(temp_workflow, {
- workflowUUID: workflowUUID, tempWfType: TempWorkflowType.send, configuredInstance: null,
+ workflowUUID: workflowUUID,
+ tempWfType: TempWorkflowType.send,
+ configuredInstance: null,
  }, destinationUUID, message, topic)).getResult();
  }
  /**
@@ -1337,7 +1326,7 @@ class DBOSExecutor {
  for (const nname of channels) {
  await notificationsClient.query(`LISTEN ${nname};`);
  }
- notificationsClient.on("notification", callback);
+ notificationsClient.on('notification', callback);
  return {
  close: async () => {
  for (const nname of channels) {
@@ -1349,7 +1338,7 @@ class DBOSExecutor {
  }
  notificationsClient.release();
  }
- }
+ },
  };
  }
  /* INTERNAL HELPERS */
@@ -1360,33 +1349,39 @@ class DBOSExecutor {
  * A recovery process that by default runs during executor init time.
  * It runs to completion all pending workflows that were executing when the previous executor failed.
  */
- async recoverPendingWorkflows(executorIDs = ["local"]) {
+ async recoverPendingWorkflows(executorIDs = ['local']) {
  if (this.debugMode) {
- throw new error_1.DBOSDebuggerError("Cannot recover pending workflows in debug mode.");
+ throw new error_1.DBOSDebuggerError('Cannot recover pending workflows in debug mode.');
  }
- const pendingWorkflows = [];
+ const handlerArray = [];
  for (const execID of executorIDs) {
- if (execID === "local" && process.env.DBOS__VMID) {
+ if (execID === 'local' && process.env.DBOS__VMID) {
  this.logger.debug(`Skip local recovery because it's running in a VM: ${process.env.DBOS__VMID}`);
  continue;
  }
- this.logger.debug(`Recovering workflows of executor: ${execID}`);
- const wIDs = await this.systemDatabase.getPendingWorkflows(execID);
- pendingWorkflows.push(...wIDs);
- }
- const handlerArray = [];
- for (const workflowUUID of pendingWorkflows) {
- try {
- handlerArray.push(await this.executeWorkflowUUID(workflowUUID));
- }
- catch (e) {
- this.logger.warn(`Recovery of workflow ${workflowUUID} failed: ${e.message}`);
+ this.logger.debug(`Recovering workflows assigned to executor: ${execID}`);
+ const pendingWorkflows = await this.systemDatabase.getPendingWorkflows(execID);
+ for (const pendingWorkflow of pendingWorkflows) {
+ this.logger.debug(`Recovering workflow: ${pendingWorkflow.workflowUUID}. Queue name: ${pendingWorkflow.queueName}`);
+ try {
+ // If the workflow is member of a queue, re-enqueue it.
+ if (pendingWorkflow.queueName) {
+ await this.systemDatabase.clearQueueAssignment(pendingWorkflow.workflowUUID);
+ handlerArray.push(this.retrieveWorkflow(pendingWorkflow.workflowUUID));
+ }
+ else {
+ handlerArray.push(await this.executeWorkflowUUID(pendingWorkflow.workflowUUID));
+ }
+ }
+ catch (e) {
+ this.logger.warn(`Recovery of workflow ${pendingWorkflow.workflowUUID} failed: ${e.message}`);
+ }
  }
  }
  return handlerArray;
  }
  async deactivateEventReceivers() {
- this.logger.info("Deactivating event receivers");
+ this.logger.info('Deactivating event receivers');
  for (const evtRcvr of this.eventReceivers || []) {
  try {
  await evtRcvr.destroy();
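Note: recovery is now queue-aware. getPendingWorkflows returns each workflow's queue name alongside its ID, and a pending workflow that belongs to a queue is re-enqueued (its stale queue assignment is cleared and a handle is returned) instead of being re-executed inline. A condensed, self-contained sketch of the new decision (TypeScript; the interfaces are assumptions for illustration):

    interface PendingWorkflow { workflowUUID: string; queueName?: string }
    interface SystemDB {
      getPendingWorkflows(executorID: string): Promise<PendingWorkflow[]>;
      clearQueueAssignment(workflowUUID: string): Promise<void>;
    }
    interface ExecutorLike {
      systemDatabase: SystemDB;
      retrieveWorkflow(workflowUUID: string): unknown;
      executeWorkflowUUID(workflowUUID: string): Promise<unknown>;
    }
    async function recoverPending(exec: ExecutorLike, executorIDs: string[] = ['local']) {
      const handles: unknown[] = [];
      for (const execID of executorIDs) {
        for (const wf of await exec.systemDatabase.getPendingWorkflows(execID)) {
          if (wf.queueName) {
            // Queued workflow: clear its stale assignment so the queue can redistribute it,
            // then hand back a handle rather than re-running it inline.
            await exec.systemDatabase.clearQueueAssignment(wf.workflowUUID);
            handles.push(exec.retrieveWorkflow(wf.workflowUUID));
          } else {
            // Non-queued workflow: re-execute it on this executor.
            handles.push(await exec.executeWorkflowUUID(wf.workflowUUID));
          }
        }
      }
      return handles;
    }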
@@ -1425,15 +1420,18 @@ class DBOSExecutor {
  const workflowStartUUID = startNewWorkflow ? undefined : workflowUUID;
  if (wfInfo) {
  return this.workflow(wfInfo.workflow, {
- workflowUUID: workflowStartUUID, parentCtx: parentCtx, configuredInstance: configuredInst, recovery: true,
- queueName: wfStatus.queueName, executeWorkflow: true,
+ workflowUUID: workflowStartUUID,
+ parentCtx: parentCtx,
+ configuredInstance: configuredInst,
+ queueName: wfStatus.queueName,
+ executeWorkflow: true,
  },
  // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
  ...inputs);
  }
  // Should be temporary workflows. Parse the name of the workflow.
  const wfName = wfStatus.workflowName;
- const nameArr = wfName.split("-");
+ const nameArr = wfName.split('-');
  if (!nameArr[0].startsWith(DBOSExecutor.tempWorkflowName)) {
  // CB - Doesn't this happen if the user changed the function name in their code?
  throw new error_1.DBOSError(`This should never happen! Cannot find workflow info for a non-temporary workflow! UUID ${workflowUUID}, name ${wfName}`);
@@ -1485,8 +1483,12 @@ class DBOSExecutor {
  throw new error_1.DBOSNotRegisteredError(wfName);
  }
  return this.workflow(temp_workflow, {
- workflowUUID: workflowStartUUID, parentCtx: parentCtx ?? undefined, configuredInstance: clsinst,
- recovery: true, tempWfType, tempWfClass, tempWfName,
+ workflowUUID: workflowStartUUID,
+ parentCtx: parentCtx ?? undefined,
+ configuredInstance: clsinst,
+ tempWfType,
+ tempWfClass,
+ tempWfName,
  },
  // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
  ...inputs);
@@ -1510,6 +1512,13 @@ class DBOSExecutor {
  oc.workflowUUID = workflowUUID;
  return oc;
  }
+ async cancelWorkflow(workflowID) {
+ await this.systemDatabase.cancelWorkflow(workflowID);
+ }
+ async resumeWorkflow(workflowID) {
+ await this.systemDatabase.resumeWorkflow(workflowID);
+ return await this.executeWorkflowUUID(workflowID, false);
+ }
  /* BACKGROUND PROCESSES */
  /**
  * Periodically flush the workflow output buffer to the system database.
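Note: the executor now exposes cancel and resume operations that delegate to the system database; resuming re-executes the workflow under its original ID. A minimal usage sketch (TypeScript; the interfaces and call site are assumptions, not a documented public API):

    interface WorkflowHandle<T = unknown> { getResult(): Promise<T> }
    interface CancellableExecutor {
      cancelWorkflow(workflowID: string): Promise<void>;
      resumeWorkflow(workflowID: string): Promise<WorkflowHandle>;
    }
    async function cancelThenResume(exec: CancellableExecutor, workflowID: string) {
      await exec.cancelWorkflow(workflowID);                // mark the workflow cancelled in the system database
      const handle = await exec.resumeWorkflow(workflowID); // later: clear that state and re-execute under the same ID
      return handle.getResult();
    }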
@@ -1532,7 +1541,7 @@ class DBOSExecutor {
  try {
  let finishedCnt = 0;
  while (finishedCnt < totalSize) {
- let sqlStmt = "INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, output, error, txn_id, txn_snapshot, created_at) VALUES ";
+ let sqlStmt = 'INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, output, error, txn_id, txn_snapshot, created_at) VALUES ';
  let paramCnt = 1;
  const values = [];
  const batchUUIDs = [];
@@ -1542,7 +1551,7 @@ class DBOSExecutor {
  const txnSnapshot = recorded.txn_snapshot;
  const createdAt = recorded.created_at;
  if (paramCnt > 1) {
- sqlStmt += ", ";
+ sqlStmt += ', ';
  }
  sqlStmt += `($${paramCnt++}, $${paramCnt++}, $${paramCnt++}, $${paramCnt++}, null, $${paramCnt++}, $${paramCnt++})`;
  values.push(workflowUUID, funcID, utils_1.DBOSJSON.stringify(output), utils_1.DBOSJSON.stringify(null), txnSnapshot, createdAt);
@@ -1558,7 +1567,9 @@ class DBOSExecutor {
  // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
  await this.userDatabase.query(sqlStmt, ...values);
  // Clean up after each batch succeeds
- batchUUIDs.forEach((value) => { localBuffer.delete(value); });
+ batchUUIDs.forEach((value) => {
+ localBuffer.delete(value);
+ });
  }
  }
  catch (error) {
@@ -1573,14 +1584,14 @@ class DBOSExecutor {
  }
  }
  logRegisteredHTTPUrls() {
- this.logger.info("HTTP endpoints supported:");
+ this.logger.info('HTTP endpoints supported:');
  this.registeredOperations.forEach((registeredOperation) => {
  const ro = registeredOperation;
  if (ro.apiURL) {
- this.logger.info(" " + ro.apiType.padEnd(6) + " : " + ro.apiURL);
+ this.logger.info(' ' + ro.apiType.padEnd(6) + ' : ' + ro.apiURL);
  const roles = ro.getRequiredRoles();
  if (roles.length > 0) {
- this.logger.info(" Required Roles: " + utils_1.DBOSJSON.stringify(roles));
+ this.logger.info(' Required Roles: ' + utils_1.DBOSJSON.stringify(roles));
  }
  }
  });