@dbos-inc/dbos-sdk 3.5.7 → 3.5.44-preview.gc094fdab44
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/src/{httpServer/server.d.ts → adminserver.d.ts} +6 -15
- package/dist/src/adminserver.d.ts.map +1 -0
- package/dist/src/{httpServer/server.js → adminserver.js} +28 -311
- package/dist/src/adminserver.js.map +1 -0
- package/dist/src/{dbos-runtime → cli}/cli.d.ts +1 -1
- package/dist/src/cli/cli.d.ts.map +1 -0
- package/dist/src/{dbos-runtime → cli}/cli.js +3 -12
- package/dist/src/cli/cli.js.map +1 -0
- package/dist/src/cli/commands.d.ts.map +1 -0
- package/dist/src/cli/commands.js.map +1 -0
- package/dist/src/cli/docker_pg_helper.d.ts.map +1 -0
- package/dist/src/cli/docker_pg_helper.js.map +1 -0
- package/dist/src/cli/migrate.d.ts +3 -0
- package/dist/src/cli/migrate.d.ts.map +1 -0
- package/dist/src/{dbos-runtime → cli}/migrate.js +1 -29
- package/dist/src/cli/migrate.js.map +1 -0
- package/dist/src/client.d.ts +2 -4
- package/dist/src/client.d.ts.map +1 -1
- package/dist/src/client.js +6 -19
- package/dist/src/client.js.map +1 -1
- package/dist/src/conductor/conductor.js +1 -1
- package/dist/src/conductor/conductor.js.map +1 -1
- package/dist/src/{dbos-runtime/config.d.ts → config.d.ts} +2 -6
- package/dist/src/config.d.ts.map +1 -0
- package/dist/src/{dbos-runtime/config.js → config.js} +9 -43
- package/dist/src/config.js.map +1 -0
- package/dist/src/context.d.ts +0 -3
- package/dist/src/context.d.ts.map +1 -1
- package/dist/src/context.js.map +1 -1
- package/dist/src/dbos-executor.d.ts +2 -27
- package/dist/src/dbos-executor.d.ts.map +1 -1
- package/dist/src/dbos-executor.js +21 -787
- package/dist/src/dbos-executor.js.map +1 -1
- package/dist/src/dbos.d.ts +2 -196
- package/dist/src/dbos.d.ts.map +1 -1
- package/dist/src/dbos.js +11 -407
- package/dist/src/dbos.js.map +1 -1
- package/dist/src/decorators.d.ts +4 -38
- package/dist/src/decorators.d.ts.map +1 -1
- package/dist/src/decorators.js +9 -164
- package/dist/src/decorators.js.map +1 -1
- package/dist/src/error.d.ts +0 -8
- package/dist/src/error.d.ts.map +1 -1
- package/dist/src/error.js +1 -14
- package/dist/src/error.js.map +1 -1
- package/dist/src/index.d.ts +3 -12
- package/dist/src/index.d.ts.map +1 -1
- package/dist/src/index.js +1 -40
- package/dist/src/index.js.map +1 -1
- package/dist/src/utils.d.ts +1 -22
- package/dist/src/utils.d.ts.map +1 -1
- package/dist/src/utils.js +20 -172
- package/dist/src/utils.js.map +1 -1
- package/dist/src/wfqueue.d.ts +0 -2
- package/dist/src/wfqueue.d.ts.map +1 -1
- package/dist/src/wfqueue.js +1 -1
- package/dist/src/wfqueue.js.map +1 -1
- package/dist/src/workflow.d.ts +0 -8
- package/dist/src/workflow.d.ts.map +1 -1
- package/dist/src/workflow.js +0 -11
- package/dist/src/workflow.js.map +1 -1
- package/dist/src/{dbos-runtime/workflow_management.d.ts → workflow_management.d.ts} +4 -6
- package/dist/src/workflow_management.d.ts.map +1 -0
- package/dist/src/{dbos-runtime/workflow_management.js → workflow_management.js} +5 -26
- package/dist/src/workflow_management.js.map +1 -0
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +4 -5
- package/dist/schemas/user_db_schema.d.ts +0 -15
- package/dist/schemas/user_db_schema.d.ts.map +0 -1
- package/dist/schemas/user_db_schema.js +0 -33
- package/dist/schemas/user_db_schema.js.map +0 -1
- package/dist/src/dbos-runtime/cli.d.ts.map +0 -1
- package/dist/src/dbos-runtime/cli.js.map +0 -1
- package/dist/src/dbos-runtime/commands.d.ts.map +0 -1
- package/dist/src/dbos-runtime/commands.js.map +0 -1
- package/dist/src/dbos-runtime/config.d.ts.map +0 -1
- package/dist/src/dbos-runtime/config.js.map +0 -1
- package/dist/src/dbos-runtime/docker_pg_helper.d.ts.map +0 -1
- package/dist/src/dbos-runtime/docker_pg_helper.js.map +0 -1
- package/dist/src/dbos-runtime/migrate.d.ts +0 -3
- package/dist/src/dbos-runtime/migrate.d.ts.map +0 -1
- package/dist/src/dbos-runtime/migrate.js.map +0 -1
- package/dist/src/dbos-runtime/workflow_management.d.ts.map +0 -1
- package/dist/src/dbos-runtime/workflow_management.js.map +0 -1
- package/dist/src/httpServer/handler.d.ts +0 -13
- package/dist/src/httpServer/handler.d.ts.map +0 -1
- package/dist/src/httpServer/handler.js +0 -25
- package/dist/src/httpServer/handler.js.map +0 -1
- package/dist/src/httpServer/handlerTypes.d.ts +0 -14
- package/dist/src/httpServer/handlerTypes.d.ts.map +0 -1
- package/dist/src/httpServer/handlerTypes.js +0 -22
- package/dist/src/httpServer/handlerTypes.js.map +0 -1
- package/dist/src/httpServer/middleware.d.ts +0 -74
- package/dist/src/httpServer/middleware.d.ts.map +0 -1
- package/dist/src/httpServer/middleware.js +0 -108
- package/dist/src/httpServer/middleware.js.map +0 -1
- package/dist/src/httpServer/server.d.ts.map +0 -1
- package/dist/src/httpServer/server.js.map +0 -1
- package/dist/src/paramdecorators.d.ts +0 -39
- package/dist/src/paramdecorators.d.ts.map +0 -1
- package/dist/src/paramdecorators.js +0 -365
- package/dist/src/paramdecorators.js.map +0 -1
- package/dist/src/procedure.d.ts +0 -18
- package/dist/src/procedure.d.ts.map +0 -1
- package/dist/src/procedure.js +0 -3
- package/dist/src/procedure.js.map +0 -1
- package/dist/src/transaction.d.ts +0 -19
- package/dist/src/transaction.d.ts.map +0 -1
- package/dist/src/transaction.js +0 -11
- package/dist/src/transaction.js.map +0 -1
- package/dist/src/user_database.d.ts +0 -176
- package/dist/src/user_database.d.ts.map +0 -1
- package/dist/src/user_database.js +0 -540
- package/dist/src/user_database.js.map +0 -1
- /package/dist/src/{dbos-runtime → cli}/commands.d.ts +0 -0
- /package/dist/src/{dbos-runtime → cli}/commands.js +0 -0
- /package/dist/src/{dbos-runtime → cli}/docker_pg_helper.d.ts +0 -0
- /package/dist/src/{dbos-runtime → cli}/docker_pg_helper.js +0 -0
package/dist/src/dbos-executor.js +21 -787

@@ -22,38 +22,27 @@ var __importStar = (this && this.__importStar) || function (mod) {
 __setModuleDefault(result, mod);
 return result;
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.DBOSExecutor = exports.TempWorkflowType = exports.OperationType = exports.DBOS_QUEUE_MAX_PRIORITY = exports.DBOS_QUEUE_MIN_PRIORITY = void 0;
 const error_1 = require("./error");
 const workflow_1 = require("./workflow");
-const transaction_1 = require("./transaction");
 const collector_1 = require("./telemetry/collector");
 const traces_1 = require("./telemetry/traces");
 const logs_1 = require("./telemetry/logs");
 const exporters_1 = require("./telemetry/exporters");
-const pg_1 = require("pg");
 const system_database_1 = require("./system_database");
 const node_crypto_1 = require("node:crypto");
-const user_database_1 = require("./user_database");
 const decorators_1 = require("./decorators");
 const api_1 = require("@opentelemetry/api");
-const knex_1 = __importDefault(require("knex"));
 const context_1 = require("./context");
 const serialize_error_1 = require("serialize-error");
 const utils_1 = require("./utils");
-const node_path_1 = __importDefault(require("node:path"));
-const node_fs_1 = __importDefault(require("node:fs"));
-const url_1 = require("url");
 const _1 = require(".");
 const wfqueue_1 = require("./wfqueue");
 const debugpoint_1 = require("./debugpoint");
 const scheduler_1 = require("./scheduler/scheduler");
 const crypto = __importStar(require("crypto"));
-const workflow_management_1 = require("./
-const utils_2 = require("./utils");
+const workflow_management_1 = require("./workflow_management");
 const database_utils_1 = require("./database_utils");
 const debouncer_1 = require("./debouncer");
 const dbosNull = {};
@@ -64,20 +53,14 @@ exports.OperationType = {
 WORKFLOW: 'workflow',
 TRANSACTION: 'transaction',
 STEP: 'step',
-PROCEDURE: 'procedure',
 };
 exports.TempWorkflowType = {
-transaction: 'transaction',
-procedure: 'procedure',
 step: 'step',
 send: 'send',
 };
 class DBOSExecutor {
 config;
 initialized;
-// User Database
-#userDatabase = undefined;
-#procedurePool = undefined;
 // System Database
 systemDatabase;
 // Temporary workflows are created by calling transaction/send/recv directly from the executor class
@@ -89,42 +72,9 @@ class DBOSExecutor {
 logger;
 ctxLogger;
 tracer;
-// eslint-disable-next-line @typescript-eslint/no-unsafe-function-type
-#typeormEntities = [];
-#drizzleEntities = {};
-#scheduler = new scheduler_1.ScheduledReceiver();
 #wfqEnded = undefined;
 executorID = utils_1.globalParams.executorID;
 static globalInstance = undefined;
-static async loadClasses(entrypoints) {
-const allClasses = [];
-for (const entrypoint of entrypoints) {
-const operations = node_path_1.default.isAbsolute(entrypoint) ? entrypoint : node_path_1.default.join(process.cwd(), entrypoint);
-let exports;
-if (node_fs_1.default.existsSync(operations)) {
-const operationsURL = (0, url_1.pathToFileURL)(operations).href;
-exports = (await import(operationsURL));
-}
-else {
-throw new error_1.DBOSFailLoadOperationsError(`Failed to load operations from the entrypoint ${entrypoint}`);
-}
-const classes = [];
-for (const key in exports) {
-const $export = exports[key];
-if (isObject($export)) {
-classes.push($export);
-}
-}
-allClasses.push(...classes);
-}
-if (allClasses.length === 0) {
-throw new error_1.DBOSFailLoadOperationsError('operations not found');
-}
-return allClasses;
-function isObject(value) {
-return typeof value === 'function' || (typeof value === 'object' && value !== null);
-}
-}
 /* WORKFLOW EXECUTOR LIFE CYCLE MANAGEMENT */
 constructor(config, { systemDatabase, debugMode } = {}) {
 this.config = config;
@@ -143,7 +93,6 @@ class DBOSExecutor {
 if (this.#debugMode) {
 this.logger.info('Running in debug mode!');
 }
-this.#procedurePool = this.config.userDbClient ? new pg_1.Pool((0, utils_2.getClientConfig)(this.config.databaseUrl)) : undefined;
 if (systemDatabase) {
 this.logger.debug('Using provided system database'); // XXX print the name or something
 this.systemDatabase = systemDatabase;
@@ -152,129 +101,19 @@ class DBOSExecutor {
 this.logger.debug('Using Postgres system database');
 this.systemDatabase = new system_database_1.PostgresSystemDatabase(this.config.systemDatabaseUrl, this.logger, this.config.sysDbPoolSize);
 }
+new scheduler_1.ScheduledReceiver(); // Create the scheduler, which registers itself.
 this.initialized = false;
 DBOSExecutor.globalInstance = this;
 }
 get appName() {
 return this.config.name;
 }
-
-const userDbClient = this.config.userDbClient;
-const userDBConfig = (0, utils_2.getClientConfig)(this.config.databaseUrl);
-userDBConfig.max = this.config.userDbPoolSize ?? 20;
-if (userDbClient === user_database_1.UserDatabaseName.PRISMA) {
-// TODO: make Prisma work with debugger proxy.
-// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-require-imports
-const { PrismaClient } = require(node_path_1.default.join(process.cwd(), 'node_modules', '@prisma', 'client')); // Find the prisma client in the node_modules of the current project
-this.#userDatabase = new user_database_1.PrismaUserDatabase(
-// eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-unsafe-call
-new PrismaClient({
-datasources: {
-db: {
-url: userDBConfig.connectionString,
-},
-},
-}));
-this.logger.debug('Loaded Prisma user database');
-}
-else if (userDbClient === user_database_1.UserDatabaseName.TYPEORM) {
-// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-require-imports
-const DataSourceExports = require('typeorm');
-try {
-this.#userDatabase = new user_database_1.TypeORMDatabase(
-// eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access
-new DataSourceExports.DataSource({
-type: 'postgres',
-url: userDBConfig.connectionString,
-connectTimeoutMS: userDBConfig.connectionTimeoutMillis,
-entities: this.#typeormEntities,
-poolSize: userDBConfig.max,
-}));
-}
-catch (s) {
-s.message = `Error loading TypeORM user database: ${s.message}`;
-this.logger.error(s);
-}
-this.logger.debug('Loaded TypeORM user database');
-}
-else if (userDbClient === user_database_1.UserDatabaseName.KNEX) {
-const knexConfig = {
-client: 'postgres',
-connection: (0, utils_2.getClientConfig)(this.config.databaseUrl),
-pool: {
-min: 0,
-max: userDBConfig.max,
-},
-};
-this.#userDatabase = new user_database_1.KnexUserDatabase((0, knex_1.default)(knexConfig));
-this.logger.debug('Loaded Knex user database');
-}
-else if (userDbClient === user_database_1.UserDatabaseName.DRIZZLE) {
-// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-require-imports
-const DrizzleExports = require('drizzle-orm/node-postgres');
-const drizzlePool = new pg_1.Pool(userDBConfig);
-// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access
-const drizzle = DrizzleExports.drizzle(drizzlePool, { schema: this.#drizzleEntities });
-// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
-this.#userDatabase = new user_database_1.DrizzleUserDatabase(drizzlePool, drizzle);
-this.logger.debug('Loaded Drizzle user database');
-}
-else {
-this.#userDatabase = new user_database_1.PGNodeUserDatabase(userDBConfig);
-this.logger.debug('Loaded Postgres user database');
-}
-}
-async init(classes) {
+async init() {
 if (this.initialized) {
 this.logger.error('Workflow executor already initialized!');
 return;
 }
-let classnames = [];
-if (!classes || !classes.length) {
-classnames = (0, decorators_1.getAllRegisteredClassNames)();
-}
-else {
-classnames = classes.map((c) => (0, decorators_1.getNameForClass)(c));
-}
 try {
-let length; // Track the length of the array (or number of keys of the object)
-for (const clsname of classnames) {
-const reg = (0, decorators_1.getClassRegistrationByName)(clsname);
-/**
-* With TSORM, we take an array of entities (Function[]) and add them to this.entities:
-*/
-if (Array.isArray(reg.ormEntities)) {
-this.#typeormEntities = this.#typeormEntities.concat(reg.ormEntities);
-length = reg.ormEntities.length;
-}
-else {
-/**
-* With Drizzle, we need to take an object of entities, since the object keys are used to access the entities from ctx.client.query:
-*/
-this.#drizzleEntities = { ...this.#drizzleEntities, ...reg.ormEntities };
-length = Object.keys(reg.ormEntities).length;
-}
-this.logger.debug(`Loaded ${length} ORM entities`);
-}
-if (this.config.userDbClient) {
-if (!this.#debugMode) {
-const res = await (0, database_utils_1.ensurePGDatabase)({
-urlToEnsure: this.config.databaseUrl,
-logger: (msg) => this.logger.debug(msg),
-});
-if (res.status === 'failed') {
-this.logger.warn(`Application database could not be verified / created: ${(0, database_utils_1.maskDatabaseUrl)(this.config.databaseUrl)}: ${res.message} ${res.hint ?? ''}\n ${res.notes.join('\n')}`);
-}
-}
-this.#configureDbClient();
-if (!this.#userDatabase) {
-this.logger.error('No user database configured!');
-throw new error_1.DBOSInitializationError('No user database configured!');
-}
-// Debug mode doesn't need to initialize the DBs. Everything should appear to be read-only.
-await this.#userDatabase.init(this.#debugMode);
-}
-// Debug mode doesn't initialize the sys db
 await this.systemDatabase.init(this.#debugMode);
 }
 catch (err) {
@@ -299,20 +138,6 @@ class DBOSExecutor {
 this.initialized = true;
 // Only execute init code if under non-debug mode
 if (!this.#debugMode) {
-for (const cls of classnames) {
-// Init its configurations
-const creg = (0, decorators_1.getClassRegistrationByName)(cls);
-for (const [_cfgname, cfg] of creg.configuredInstances) {
-await cfg.initialize(new _1.InitContext());
-}
-}
-for (const v of (0, decorators_1.getAllRegisteredFunctions)()) {
-const m = v;
-if (m.init === true) {
-this.logger.debug('Executing init method: ' + m.name);
-await m.origFunction(new _1.InitContext());
-}
-}
 // Compute the application version if not provided
 if (utils_1.globalParams.appVersion === '') {
 utils_1.globalParams.appVersion = this.computeAppVersion();
@@ -326,34 +151,10 @@ class DBOSExecutor {
 }
 this.logger.info('DBOS launched!');
 }
-#logNotice(msg) {
-switch (msg.severity) {
-case 'INFO':
-case 'LOG':
-case 'NOTICE':
-this.logger.info(msg.message);
-break;
-case 'WARNING':
-this.logger.warn(msg.message);
-break;
-case 'DEBUG':
-this.logger.debug(msg.message);
-break;
-case 'ERROR':
-case 'FATAL':
-case 'PANIC':
-this.logger.error(msg.message);
-break;
-default:
-this.logger.error(`Unknown notice severity: ${msg.severity} - ${msg.message}`);
-}
-}
 async destroy() {
 try {
 await this.systemDatabase.awaitRunningWorkflows();
 await this.systemDatabase.destroy();
-await this.#userDatabase?.destroy();
-await this.#procedurePool?.end();
 await this.logger.destroy();
 if (DBOSExecutor.globalInstance === this) {
 DBOSExecutor.globalInstance = undefined;
@@ -365,24 +166,6 @@ class DBOSExecutor {
 throw err;
 }
 }
-async createUserSchema() {
-if (!this.#userDatabase) {
-throw new Error('User database not enabled.');
-}
-await this.#userDatabase.createSchema();
-}
-async dropUserSchema() {
-if (!this.#userDatabase) {
-throw new Error('User database not enabled.');
-}
-await this.#userDatabase.dropSchema();
-}
-async queryUserDbFunction(queryFunction, ...params) {
-if (!this.#userDatabase) {
-throw new Error('User database not enabled.');
-}
-return await this.#userDatabase.queryFunction(queryFunction, ...params);
-}
 // This could return WF, or the function underlying a temp wf
 #getFunctionInfoFromWFStatus(wf) {
 const methReg = (0, decorators_1.getFunctionRegistrationByName)(wf.workflowClassName, wf.workflowName);
@@ -420,7 +203,7 @@ class DBOSExecutor {
 if (params.queueName) {
 const wfqueue = this.#getQueueByName(params.queueName);
 if (!wfqueue.priorityEnabled && priority !== undefined) {
-
+this.logger.warn(`Priority is not enabled for queue ${params.queueName}. Setting priority will not have any effect.`);
 }
 }
 const pctx = { ...(0, context_1.getCurrentContextStore)() }; // function ID was already incremented...
@@ -446,8 +229,6 @@ class DBOSExecutor {
 assumedRole: pctx?.assumedRole ?? '',
 });
 const isTempWorkflow = DBOSExecutor.#tempWorkflowName === wfname;
-const funcArgs = (0, utils_1.serializeFunctionInputOutput)(args, [wfname, '<arguments>']);
-args = funcArgs.deserialized;
 const internalStatus = {
 workflowUUID: workflowID,
 status: params.queueName !== undefined ? workflow_1.StatusString.ENQUEUED : workflow_1.StatusString.PENDING,
@@ -467,7 +248,7 @@ class DBOSExecutor {
 createdAt: Date.now(), // Remember the start time of this workflow,
 timeoutMS: timeoutMS,
 deadlineEpochMS: deadlineEpochMS,
-input:
+input: utils_1.DBOSJSON.stringify(args),
 deduplicationID: params.enqueueOptions?.deduplicationID,
 priority: priority ?? 0,
 };
@@ -485,8 +266,8 @@ class DBOSExecutor {
 throw new error_1.DBOSDebuggerError(`Failed to find inputs for workflow UUID ${workflowID}`);
 }
 // Make sure we use the same input.
-if (
-throw new error_1.DBOSDebuggerError(`Detected different inputs for workflow UUID ${workflowID}.\n Received: ${
+if (utils_1.DBOSJSON.stringify(args) !== wfStatus.input) {
+throw new error_1.DBOSDebuggerError(`Detected different inputs for workflow UUID ${workflowID}.\n Received: ${utils_1.DBOSJSON.stringify(args)}\n Original: ${wfStatus.input}`);
 }
 status = wfStatus.status;
 }
@@ -575,12 +356,6 @@ class DBOSExecutor {
 });
 });
 if (this.#debugMode) {
-function resultsMatch(recordedResult, callResult) {
-if (recordedResult === null) {
-return callResult === undefined || callResult === null;
-}
-return utils_1.DBOSJSON.stringify(recordedResult) === utils_1.DBOSJSON.stringify(callResult);
-}
 const recordedResult = DBOSExecutor.reviveResultOrError((await this.systemDatabase.awaitWorkflowResult(workflowID)));
 if (!resultsMatch(recordedResult, callResult)) {
 this.logger.error(`Detect different output for the workflow UUID ${workflowID}!\n Received: ${utils_1.DBOSJSON.stringify(callResult)}\n Original: ${utils_1.DBOSJSON.stringify(recordedResult)}`);
@@ -590,9 +365,13 @@ class DBOSExecutor {
 else {
 result = callResult;
 }
-
-
-
+function resultsMatch(recordedResult, callResult) {
+if (recordedResult === null) {
+return callResult === undefined || callResult === null;
+}
+return utils_1.DBOSJSON.stringify(recordedResult) === utils_1.DBOSJSON.stringify(callResult);
+}
+internalStatus.output = utils_1.DBOSJSON.stringify(result);
 internalStatus.status = workflow_1.StatusString.SUCCESS;
 if (!this.#debugMode) {
 await this.systemDatabase.recordWorkflowOutput(workflowID, internalStatus);
@@ -647,510 +426,6 @@ class DBOSExecutor {
 throw new error_1.DBOSNotRegisteredError(name, `Workflow queue '${name}' is not defined.`);
 return q;
 }
-/**
-* Retrieve the transaction snapshot information of the current transaction
-*/
-static async #retrieveSnapshot(query) {
-const rows = await query('SELECT pg_current_snapshot()::text as txn_snapshot;', []);
-return rows[0].txn_snapshot;
-}
-/**
-* Check if an operation has already executed in a workflow.
-* If it previously executed successfully, return its output.
-* If it previously executed and threw an error, return that error.
-* Otherwise, return DBOSNull.
-* Also return the transaction snapshot and id information of the original or current transaction.
-*/
-async #checkExecution(query, workflowUUID, funcID, funcName) {
-const rows = await query(`(SELECT output, error, txn_snapshot, txn_id, function_name, true as recorded
-FROM dbos.transaction_outputs
-WHERE workflow_uuid=$1 AND function_id=$2
-UNION ALL
-SELECT null as output, null as error, pg_current_snapshot()::text as txn_snapshot,
-null as txn_id, '' as function_name, false as recorded
-) ORDER BY recorded`, [workflowUUID, funcID]);
-if (rows.length === 0 || rows.length > 2) {
-const returnedRows = JSON.stringify(rows);
-this.logger.error('Unexpected! This should never happen. Returned rows: ' + returnedRows);
-throw new error_1.DBOSError('This should never happen. Returned rows: ' + returnedRows);
-}
-if (rows.length === 2) {
-if (rows[1].function_name !== funcName) {
-throw new error_1.DBOSUnexpectedStepError(workflowUUID, funcID, funcName, rows[0].function_name);
-}
-const { txn_snapshot, txn_id } = rows[1];
-const error = utils_1.DBOSJSON.parse(rows[1].error);
-if (error) {
-return { result: (0, serialize_error_1.deserializeError)(error), txn_snapshot, txn_id: txn_id ?? undefined };
-}
-else {
-return { result: utils_1.DBOSJSON.parse(rows[1].output), txn_snapshot, txn_id: txn_id ?? undefined };
-}
-}
-else {
-const { txn_snapshot } = rows[0];
-return { result: dbosNull, txn_snapshot, txn_id: undefined };
-}
-}
-/**
-* Write a operation's output to the database.
-*/
-async #recordOutput(query, workflowUUID, funcID, txnSnapshot, serialOutput, isKeyConflict, function_name) {
-if (this.#debugMode) {
-throw new error_1.DBOSDebuggerError('Cannot record output in debug mode.');
-}
-try {
-const rows = await query('INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, output, txn_id, txn_snapshot, created_at, function_name) VALUES ($1, $2, $3, (select pg_current_xact_id_if_assigned()::text), $4, $5, $6) RETURNING txn_id;', [workflowUUID, funcID, serialOutput, txnSnapshot, Date.now(), function_name]);
-return rows[0].txn_id;
-}
-catch (error) {
-if (isKeyConflict(error)) {
-// Serialization and primary key conflict (Postgres).
-throw new error_1.DBOSWorkflowConflictError(workflowUUID);
-}
-else {
-throw error;
-}
-}
-}
-/**
-* Record an error in an operation to the database.
-*/
-async #recordError(query, workflowUUID, funcID, txnSnapshot, err, isKeyConflict, function_name) {
-if (this.#debugMode) {
-throw new error_1.DBOSDebuggerError('Cannot record error in debug mode.');
-}
-try {
-const serialErr = utils_1.DBOSJSON.stringify((0, serialize_error_1.serializeError)(err));
-await query('INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, error, txn_id, txn_snapshot, created_at, function_name) VALUES ($1, $2, $3, null, $4, $5, $6) RETURNING txn_id;', [workflowUUID, funcID, serialErr, txnSnapshot, Date.now(), function_name]);
-}
-catch (error) {
-if (isKeyConflict(error)) {
-// Serialization and primary key conflict (Postgres).
-throw new error_1.DBOSWorkflowConflictError(workflowUUID);
-}
-else {
-throw error;
-}
-}
-}
-async getTransactions(workflowUUID) {
-if (this.#userDatabase) {
-const rows = await this.#userDatabase.query(`SELECT function_id, function_name, output, error FROM ${DBOSExecutor.systemDBSchemaName}.transaction_outputs WHERE workflow_uuid=$1`, workflowUUID);
-for (const row of rows) {
-row.output = row.output !== null ? utils_1.DBOSJSON.parse(row.output) : null;
-row.error = row.error !== null ? (0, serialize_error_1.deserializeError)(utils_1.DBOSJSON.parse(row.error)) : null;
-}
-return rows;
-}
-else {
-return [];
-}
-}
-async runTransactionTempWF(txn, params, ...args) {
-return await (await this.startTransactionTempWF(txn, params, undefined, undefined, ...args)).getResult();
-}
-async startTransactionTempWF(txn, params, callerWFID, callerFunctionID, ...args) {
-// Create a workflow and call transaction.
-const temp_workflow = async (...args) => {
-return await this.callTransactionFunction(txn, params.configuredInstance ?? null, ...args);
-};
-return await this.internalWorkflow(temp_workflow, {
-...params,
-tempWfType: exports.TempWorkflowType.transaction,
-tempWfName: (0, decorators_1.getRegisteredFunctionName)(txn),
-tempWfClass: (0, decorators_1.getRegisteredFunctionClassName)(txn),
-}, callerWFID, callerFunctionID, ...args);
-}
-async callTransactionFunction(txn, clsinst, ...args) {
-const userDB = this.#userDatabase;
-if (!userDB) {
-throw new Error('No user database configured for transactions.');
-}
-const txnReg = (0, decorators_1.getFunctionRegistration)(txn);
-if (!txnReg || !txnReg.txnConfig) {
-throw new error_1.DBOSNotRegisteredError(txn.name);
-}
-const funcId = (0, context_1.functionIDGetIncrement)();
-const pctx = { ...(0, context_1.getCurrentContextStore)() };
-const wfid = pctx.workflowId;
-await this.systemDatabase.checkIfCanceled(wfid);
-let retryWaitMillis = 1;
-const backoffFactor = 1.5;
-const maxRetryWaitMs = 2000; // Maximum wait 2 seconds.
-const span = this.tracer.startSpan(txn.name, {
-operationUUID: wfid,
-operationType: exports.OperationType.TRANSACTION,
-operationName: txn.name,
-authenticatedUser: pctx.authenticatedUser ?? '',
-assumedRole: pctx.assumedRole ?? '',
-authenticatedRoles: pctx.authenticatedRoles ?? [],
-isolationLevel: txnReg.txnConfig.isolationLevel,
-});
-while (true) {
-await this.systemDatabase.checkIfCanceled(wfid);
-let txn_snapshot = 'invalid';
-let prevResultFound = false;
-const wrappedTransaction = async (client) => {
-// If the UUID is preset, it is possible this execution previously happened. Check, and return its original result if it did.
-// Note: It is possible to retrieve a generated ID from a workflow handle, run a concurrent execution, and cause trouble for yourself. We recommend against this.
-let prevResult = dbosNull;
-const queryFunc = (sql, args) => userDB.queryWithClient(client, sql, ...args);
-if (pctx.presetID) {
-const executionResult = await this.#checkExecution(queryFunc, wfid, funcId, txn.name);
-prevResult = executionResult.result;
-txn_snapshot = executionResult.txn_snapshot;
-if (prevResult !== dbosNull) {
-prevResultFound = true;
-span.setAttribute('cached', true);
-// Return/throw the previous result
-if (prevResult instanceof Error) {
-throw prevResult;
-}
-else {
-return prevResult;
-}
-}
-}
-else {
-// Collect snapshot information for read-only transactions and non-preset UUID transactions, if not already collected above
-txn_snapshot = await DBOSExecutor.#retrieveSnapshot(queryFunc);
-}
-if (this.#debugMode && prevResult === dbosNull) {
-throw new error_1.DBOSDebuggerError(`Failed to find the recorded output for the transaction: workflow UUID ${wfid}, step number ${funcId}`);
-}
-// Execute the user's transaction.
-const ctxlog = this.ctxLogger;
-const result = await (async function () {
-try {
-return await api_1.context.with(api_1.trace.setSpan(api_1.context.active(), span), async () => {
-return await (0, context_1.runWithParentContext)(pctx, {
-authenticatedRoles: pctx?.authenticatedRoles,
-authenticatedUser: pctx?.authenticatedUser,
-workflowId: wfid,
-curTxFunctionId: funcId,
-parentCtx: pctx,
-sqlClient: client,
-logger: ctxlog,
-}, async () => {
-const tf = txn;
-return await tf.call(clsinst, ...args);
-});
-});
-}
-catch (e) {
-return e instanceof Error ? e : new Error(`${e}`);
-}
-})();
-if (this.#debugMode) {
-if (prevResult instanceof Error) {
-throw prevResult;
-}
-const prevResultJson = utils_1.DBOSJSON.stringify(prevResult);
-const resultJson = utils_1.DBOSJSON.stringify(result);
-if (prevResultJson !== resultJson) {
-this.logger.error(`Detected different transaction output than the original one!\n Result: ${resultJson}\n Original: ${utils_1.DBOSJSON.stringify(prevResultJson)}`);
-}
-return prevResult;
-}
-if (result instanceof Error) {
-throw result;
-}
-// Record the execution, commit, and return.
-const funcOutput = (0, utils_1.serializeFunctionInputOutput)(result, [txn.name, '<result>']);
-try {
-// Synchronously record the output of write transactions and obtain the transaction ID.
-const pg_txn_id = await this.#recordOutput(queryFunc, wfid, funcId, txn_snapshot, funcOutput.stringified, (error) => userDB.isKeyConflictError(error), txn.name);
-span.setAttribute('pg_txn_id', pg_txn_id);
-}
-catch (error) {
-if (userDB.isFailedSqlTransactionError(error)) {
-this.logger.error(`Postgres aborted the ${txn.name} @DBOS.transaction of Workflow ${wfid}, but the function did not raise an exception. Please ensure that the @DBOS.transaction method raises an exception if the database transaction is aborted.`);
-throw new error_1.DBOSFailedSqlTransactionError(wfid, txn.name);
-}
-else {
-throw error;
-}
-}
-return funcOutput.deserialized;
-};
-try {
-const result = await userDB.transaction(wrappedTransaction, txnReg.txnConfig);
-span.setStatus({ code: api_1.SpanStatusCode.OK });
-this.tracer.endSpan(span);
-return result;
-}
-catch (err) {
-const e = err;
-if (!prevResultFound && !this.#debugMode && !(e instanceof error_1.DBOSUnexpectedStepError)) {
-if (userDB.isRetriableTransactionError(err)) {
-// serialization_failure in PostgreSQL
-span.addEvent('TXN SERIALIZATION FAILURE', { retryWaitMillis: retryWaitMillis }, performance.now());
-// Retry serialization failures.
-await (0, utils_1.sleepms)(retryWaitMillis);
-retryWaitMillis *= backoffFactor;
-retryWaitMillis = retryWaitMillis < maxRetryWaitMs ? retryWaitMillis : maxRetryWaitMs;
-continue;
-}
-// Record and throw other errors.
-const e = err;
-await userDB.transaction(async (client) => {
-const func = (sql, args) => userDB.queryWithClient(client, sql, ...args);
-await this.#recordError(func, wfid, funcId, txn_snapshot, e, (error) => userDB.isKeyConflictError(error), txn.name);
-}, { isolationLevel: transaction_1.IsolationLevel.ReadCommitted });
-}
-span.setStatus({ code: api_1.SpanStatusCode.ERROR, message: e.message });
-this.tracer.endSpan(span);
-throw err;
-}
-}
-}
-async runProcedureTempWF(proc, params, ...args) {
-// Create a workflow and call procedure.
-const temp_workflow = async (...args) => {
-return this.callProcedureFunction(proc, ...args);
-};
-return await (await this.workflow(temp_workflow, {
-...params,
-tempWfType: exports.TempWorkflowType.procedure,
-tempWfName: (0, decorators_1.getRegisteredFunctionName)(proc),
-tempWfClass: (0, decorators_1.getRegisteredFunctionClassName)(proc),
-}, ...args)).getResult();
-}
-async callProcedureFunction(proc, ...args) {
-const procInfo = (0, decorators_1.getFunctionRegistration)(proc);
-if (!procInfo || !procInfo.procConfig) {
-throw new error_1.DBOSNotRegisteredError(proc.name);
-}
-const procConfig = procInfo.procConfig;
-const pctx = (0, context_1.getCurrentContextStore)();
-const wfid = pctx.workflowId;
-await this.systemDatabase.checkIfCanceled(wfid);
-const executeLocally = this.#debugMode || (procConfig.executeLocally ?? false);
-const funcId = (0, context_1.functionIDGetIncrement)();
-const span = this.tracer.startSpan(proc.name, {
-operationUUID: wfid,
-operationType: exports.OperationType.PROCEDURE,
-operationName: proc.name,
-authenticatedUser: pctx.authenticatedUser ?? '',
-assumedRole: pctx.assumedRole ?? '',
-authenticatedRoles: pctx.authenticatedRoles ?? [],
-isolationLevel: procInfo.procConfig.isolationLevel,
-executeLocally,
-});
-try {
-const result = executeLocally
-? await this.#callProcedureFunctionLocal(proc, args, span, procInfo, funcId)
-: await this.#callProcedureFunctionRemote(proc, args, span, procConfig, funcId);
-span.setStatus({ code: api_1.SpanStatusCode.OK });
-return result;
-}
-catch (e) {
-const { message } = e;
-span.setStatus({ code: api_1.SpanStatusCode.ERROR, message });
-throw e;
-}
-finally {
-this.tracer.endSpan(span);
-}
-}
-async #callProcedureFunctionLocal(proc, args, span, procInfo, funcId) {
-const procPool = this.#procedurePool;
-const userDB = this.#userDatabase;
-if (!procPool || !userDB) {
-throw new Error('User database not enabled.');
-}
-let retryWaitMillis = 1;
-const backoffFactor = 1.5;
-const maxRetryWaitMs = 2000; // Maximum wait 2 seconds.
-const pctx = { ...(0, context_1.getCurrentContextStore)() };
-const wfid = pctx.workflowId;
-while (true) {
-await this.systemDatabase.checkIfCanceled(wfid);
-let txn_snapshot = 'invalid';
-const wrappedProcedure = async (client) => {
-let prevResult = dbosNull;
-const queryFunc = (sql, args) => procPool.query(sql, args).then((v) => v.rows);
-if (pctx.presetID) {
-const executionResult = await this.#checkExecution(queryFunc, wfid, funcId, proc.name);
-prevResult = executionResult.result;
-txn_snapshot = executionResult.txn_snapshot;
-if (prevResult !== dbosNull) {
-span.setAttribute('cached', true);
-// Return/throw the previous result
-if (prevResult instanceof Error) {
-throw prevResult;
-}
-else {
-return prevResult;
-}
-}
-}
-else {
-// Collect snapshot information for read-only transactions and non-preset UUID transactions, if not already collected above
-txn_snapshot = await DBOSExecutor.#retrieveSnapshot(queryFunc);
-}
-if (this.#debugMode && prevResult === dbosNull) {
-throw new error_1.DBOSDebuggerError(`Failed to find the recorded output for the procedure: workflow UUID ${wfid}, step number ${funcId}`);
-}
-// Execute the user's transaction.
-const ctxlog = this.ctxLogger;
-const result = await (async function () {
-try {
-// Check we are in a workflow context and not in a step / transaction already
-if (!pctx)
-throw new error_1.DBOSInvalidWorkflowTransitionError();
-if (!(0, context_1.isInWorkflowCtx)(pctx))
-throw new error_1.DBOSInvalidWorkflowTransitionError();
-return await api_1.context.with(api_1.trace.setSpan(api_1.context.active(), span), async () => {
-return await (0, context_1.runWithParentContext)(pctx, {
-curTxFunctionId: funcId,
-parentCtx: pctx,
-isInStoredProc: true,
-sqlClient: client,
-logger: ctxlog,
-}, async () => {
-const pf = proc;
-return await pf(...args);
-});
-});
-}
-catch (e) {
-return e instanceof Error ? e : new Error(`${e}`);
-}
-})();
-if (this.#debugMode) {
-if (prevResult instanceof Error) {
-throw prevResult;
-}
-const prevResultJson = utils_1.DBOSJSON.stringify(prevResult);
-const resultJson = utils_1.DBOSJSON.stringify(result);
-if (prevResultJson !== resultJson) {
-this.logger.error(`Detected different transaction output than the original one!\n Result: ${resultJson}\n Original: ${utils_1.DBOSJSON.stringify(prevResultJson)}`);
-}
-return prevResult;
-}
-if (result instanceof Error) {
-throw result;
-}
-// Synchronously record the output of write transactions and obtain the transaction ID.
-const func = (sql, args) => client.query(sql, args).then((v) => v.rows);
-const funcResult = (0, utils_1.serializeFunctionInputOutput)(result, [proc.name, '<result>']);
-const pg_txn_id = await this.#recordOutput(func, wfid, funcId, txn_snapshot, funcResult.stringified, user_database_1.pgNodeIsKeyConflictError, proc.name);
-// const pg_txn_id = await wfCtx.recordOutputProc<R>(client, funcId, txn_snapshot, result);
-span.setAttribute('pg_txn_id', pg_txn_id);
-return funcResult.deserialized;
-};
-try {
-const result = await this.invokeStoredProcFunction(wrappedProcedure, {
-isolationLevel: procInfo.procConfig.isolationLevel,
-});
-span.setStatus({ code: api_1.SpanStatusCode.OK });
-return result;
-}
-catch (err) {
-if (!this.#debugMode) {
-if (userDB.isRetriableTransactionError(err)) {
-// serialization_failure in PostgreSQL
-span.addEvent('TXN SERIALIZATION FAILURE', { retryWaitMillis: retryWaitMillis }, performance.now());
-// Retry serialization failures.
-await (0, utils_1.sleepms)(retryWaitMillis);
-retryWaitMillis *= backoffFactor;
-retryWaitMillis = retryWaitMillis < maxRetryWaitMs ? retryWaitMillis : maxRetryWaitMs;
-continue;
-}
-// Record and throw other errors.
-const e = err;
-await this.invokeStoredProcFunction(async (client) => {
-const func = (sql, args) => client.query(sql, args).then((v) => v.rows);
-await this.#recordError(func, wfid, funcId, txn_snapshot, e, user_database_1.pgNodeIsKeyConflictError, proc.name);
-}, { isolationLevel: transaction_1.IsolationLevel.ReadCommitted });
-await userDB.transaction(async (client) => {
-const func = (sql, args) => userDB.queryWithClient(client, sql, ...args);
-await this.#recordError(func, wfid, funcId, txn_snapshot, e, (error) => userDB.isKeyConflictError(error), proc.name);
-}, { isolationLevel: transaction_1.IsolationLevel.ReadCommitted });
-}
-throw err;
-}
-}
-}
-async #callProcedureFunctionRemote(proc, args, span, config, funcId) {
-if (this.#debugMode) {
-throw new error_1.DBOSDebuggerError("Can't invoke stored procedure in debug mode.");
-}
-const pctx = (0, context_1.getCurrentContextStore)();
-const wfid = pctx.workflowId;
-await this.systemDatabase.checkIfCanceled(wfid);
-const $jsonCtx = {
-request: pctx.request,
-authenticatedUser: pctx.authenticatedUser,
-authenticatedRoles: pctx.authenticatedRoles,
-assumedRole: pctx.assumedRole,
-};
-// TODO (Qian/Harry): remove this unshift when we remove the resultBuffer argument
-// Note, node-pg converts JS arrays to postgres array literals, so must call JSON.strigify on
-// args and bufferedResults before being passed to #invokeStoredProc
-const $args = [wfid, funcId, pctx.presetID, $jsonCtx, null, JSON.stringify(args)];
-const readonly = config.readOnly ?? false;
-if (!readonly) {
-$args.unshift(null);
-}
-const [{ return_value }] = await this.#invokeStoredProc(proc, $args);
-const { error, output, txn_id } = return_value;
-// if the stored proc returns an error, deserialize and throw it.
-// stored proc saves the error in tx_output before returning
-if (error) {
-throw (0, serialize_error_1.deserializeError)(error);
-}
-if (txn_id) {
-span.setAttribute('pg_txn_id', txn_id);
-}
-span.setStatus({ code: api_1.SpanStatusCode.OK });
-return output;
-}
-async #invokeStoredProc(proc, args) {
-if (!this.#procedurePool) {
-throw new Error('User Database not enabled.');
-}
-const client = await this.#procedurePool.connect();
-const log = (msg) => this.#logNotice(msg);
-const procname = (0, decorators_1.getRegisteredFunctionFullName)(proc);
-const plainProcName = `${procname.className}_${procname.name}_p`;
-const procName = utils_1.globalParams.wasComputed ? plainProcName : `v${utils_1.globalParams.appVersion}_${plainProcName}`;
-const sql = `CALL "${procName}"(${args.map((_v, i) => `$${i + 1}`).join()});`;
-try {
-client.on('notice', log);
-return await client.query(sql, args).then((value) => value.rows);
-}
-finally {
-client.off('notice', log);
-client.release();
-}
-}
-async invokeStoredProcFunction(func, config) {
-if (!this.#procedurePool) {
-throw new Error('User Database not enabled.');
-}
-const client = await this.#procedurePool.connect();
-try {
-const readOnly = config.readOnly ?? false;
-const isolationLevel = config.isolationLevel ?? transaction_1.IsolationLevel.Serializable;
-await client.query(`BEGIN ISOLATION LEVEL ${isolationLevel}`);
-if (readOnly) {
-await client.query(`SET TRANSACTION READ ONLY`);
-}
-const result = await func(client);
-await client.query(`COMMIT`);
-return result;
-}
-catch (err) {
-await client.query(`ROLLBACK`);
-throw err;
-}
-finally {
-client.release();
-}
-}
 async runStepTempWF(stepFn, params, ...args) {
 return await (await this.startStepTempWF(stepFn, params, undefined, undefined, ...args)).getResult();
 }
@@ -1278,13 +553,12 @@ class DBOSExecutor {
 }
 else {
 // Record the execution and return.
-const funcResult = (0, utils_1.serializeFunctionInputOutput)(result, [stepFnName, '<result>']);
 await this.systemDatabase.recordOperationResult(wfid, funcID, stepFnName, true, {
-output:
+output: utils_1.DBOSJSON.stringify(result),
 });
 span.setStatus({ code: api_1.SpanStatusCode.OK });
 this.tracer.endSpan(span);
-return
+return result;
 }
 }
 async runSendTempWF(destinationId, message, topic, idempotencyKey) {
@@ -1313,7 +587,7 @@ class DBOSExecutor {
 */
 forkWorkflow(workflowID, startStep, options = {}) {
 const newWorkflowID = options.newWorkflowID ?? (0, context_1.getNextWFID)(undefined);
-return (0, workflow_management_1.forkWorkflow)(this.systemDatabase,
+return (0, workflow_management_1.forkWorkflow)(this.systemDatabase, workflowID, startStep, { ...options, newWorkflowID });
 }
 /**
 * Retrieve a handle for a workflow UUID.
@@ -1331,12 +605,11 @@ class DBOSExecutor {
 }
 try {
 const output = await callback();
-const funcOutput = (0, utils_1.serializeFunctionInputOutput)(output, [functionName, '<result>']);
 await this.systemDatabase.recordOperationResult(workflowID, functionID, functionName, true, {
-output:
+output: utils_1.DBOSJSON.stringify(output),
 childWorkflowID: childWfId,
 });
-return
+return output;
 }
 catch (e) {
 await this.systemDatabase.recordOperationResult(workflowID, functionID, functionName, false, {
@@ -1358,18 +631,7 @@ class DBOSExecutor {
 return (0, workflow_management_1.listQueuedWorkflows)(this.systemDatabase, input);
 }
 async listWorkflowSteps(workflowID) {
-return (0, workflow_management_1.listWorkflowSteps)(this.systemDatabase,
-}
-async queryUserDB(sql, params) {
-if (!this.#userDatabase) {
-throw new Error('User database not enabled.');
-}
-if (params !== undefined) {
-return await this.#userDatabase.query(sql, ...params);
-}
-else {
-return await this.#userDatabase.query(sql);
-}
+return (0, workflow_management_1.listWorkflowSteps)(this.systemDatabase, workflowID);
 }
 /* INTERNAL HELPERS */
 /**
@@ -1475,22 +737,7 @@ class DBOSExecutor {
 if (!nameArr[0].startsWith(DBOSExecutor.#tempWorkflowName)) {
 throw new error_1.DBOSError(`Cannot find workflow function for a non-temporary workflow, ID ${workflowID}, class '${wfStatus.workflowClassName}', function '${wfName}'; did you change your code?`);
 }
-if (nameArr[1] === exports.TempWorkflowType.
-const txnReg = (0, decorators_1.getFunctionRegistrationByName)(wfStatus.workflowClassName, nameArr[2]);
-if (!txnReg?.txnConfig) {
-this.logger.error(`Cannot find transaction info for ID ${workflowID}, name ${nameArr[2]}`);
-throw new error_1.DBOSNotRegisteredError(nameArr[2]);
-}
-return await (0, context_1.runWithTopContext)(recoverCtx, async () => {
-return await this.startTransactionTempWF(txnReg.registeredFunction, {
-workflowUUID: workflowStartID,
-configuredInstance: configuredInst,
-queueName: wfStatus.queueName,
-executeWorkflow: true,
-}, undefined, undefined, ...inputs);
-});
-}
-else if (nameArr[1] === exports.TempWorkflowType.step) {
+if (nameArr[1] === exports.TempWorkflowType.step) {
 const stepReg = (0, decorators_1.getFunctionRegistrationByName)(wfStatus.workflowClassName, nameArr[2]);
 if (!stepReg?.stepConfig) {
 this.logger.error(`Cannot find step info for ID ${workflowID}, name ${nameArr[2]}`);
@@ -1560,19 +807,6 @@ class DBOSExecutor {
 async resumeWorkflow(workflowID) {
 await this.systemDatabase.resumeWorkflow(workflowID);
 }
-logRegisteredHTTPUrls() {
-this.logger.info('HTTP endpoints supported:');
-(0, decorators_1.getAllRegisteredFunctions)().forEach((registeredOperation) => {
-const ro = registeredOperation;
-if (ro.apiURL) {
-this.logger.info(' ' + ro.apiType.padEnd(6) + ' : ' + ro.apiURL);
-const roles = ro.getRequiredRoles();
-if (roles.length > 0) {
-this.logger.info(' Required Roles: ' + utils_1.DBOSJSON.stringify(roles));
-}
-}
-});
-}
 /**
 An application's version is computed from a hash of the source of its workflows.
 This is guaranteed to be stable given identical source code because it uses an MD5 hash