@dbos-inc/dbos-sdk 3.5.44-preview.gc094fdab44 → 3.6.3-preview
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -0
- package/dist/schemas/user_db_schema.d.ts +15 -0
- package/dist/schemas/user_db_schema.d.ts.map +1 -0
- package/dist/schemas/user_db_schema.js +33 -0
- package/dist/schemas/user_db_schema.js.map +1 -0
- package/dist/src/client.d.ts +4 -2
- package/dist/src/client.d.ts.map +1 -1
- package/dist/src/client.js +19 -6
- package/dist/src/client.js.map +1 -1
- package/dist/src/conductor/conductor.js +1 -1
- package/dist/src/conductor/conductor.js.map +1 -1
- package/dist/src/context.d.ts +3 -0
- package/dist/src/context.d.ts.map +1 -1
- package/dist/src/context.js.map +1 -1
- package/dist/src/dbos-executor.d.ts +27 -2
- package/dist/src/dbos-executor.d.ts.map +1 -1
- package/dist/src/dbos-executor.js +787 -21
- package/dist/src/dbos-executor.js.map +1 -1
- package/dist/src/{cli → dbos-runtime}/cli.d.ts +1 -1
- package/dist/src/dbos-runtime/cli.d.ts.map +1 -0
- package/dist/src/{cli → dbos-runtime}/cli.js +12 -3
- package/dist/src/dbos-runtime/cli.js.map +1 -0
- package/dist/src/dbos-runtime/commands.d.ts.map +1 -0
- package/dist/src/dbos-runtime/commands.js.map +1 -0
- package/dist/src/{config.d.ts → dbos-runtime/config.d.ts} +6 -2
- package/dist/src/dbos-runtime/config.d.ts.map +1 -0
- package/dist/src/{config.js → dbos-runtime/config.js} +43 -9
- package/dist/src/dbos-runtime/config.js.map +1 -0
- package/dist/src/dbos-runtime/docker_pg_helper.d.ts.map +1 -0
- package/dist/src/dbos-runtime/docker_pg_helper.js.map +1 -0
- package/dist/src/dbos-runtime/migrate.d.ts +3 -0
- package/dist/src/dbos-runtime/migrate.d.ts.map +1 -0
- package/dist/src/{cli → dbos-runtime}/migrate.js +29 -1
- package/dist/src/dbos-runtime/migrate.js.map +1 -0
- package/dist/src/{workflow_management.d.ts → dbos-runtime/workflow_management.d.ts} +6 -4
- package/dist/src/dbos-runtime/workflow_management.d.ts.map +1 -0
- package/dist/src/{workflow_management.js → dbos-runtime/workflow_management.js} +26 -5
- package/dist/src/dbos-runtime/workflow_management.js.map +1 -0
- package/dist/src/dbos.d.ts +196 -2
- package/dist/src/dbos.d.ts.map +1 -1
- package/dist/src/dbos.js +407 -11
- package/dist/src/dbos.js.map +1 -1
- package/dist/src/decorators.d.ts +38 -4
- package/dist/src/decorators.d.ts.map +1 -1
- package/dist/src/decorators.js +164 -9
- package/dist/src/decorators.js.map +1 -1
- package/dist/src/error.d.ts +8 -0
- package/dist/src/error.d.ts.map +1 -1
- package/dist/src/error.js +14 -1
- package/dist/src/error.js.map +1 -1
- package/dist/src/httpServer/handler.d.ts +13 -0
- package/dist/src/httpServer/handler.d.ts.map +1 -0
- package/dist/src/httpServer/handler.js +25 -0
- package/dist/src/httpServer/handler.js.map +1 -0
- package/dist/src/httpServer/handlerTypes.d.ts +14 -0
- package/dist/src/httpServer/handlerTypes.d.ts.map +1 -0
- package/dist/src/httpServer/handlerTypes.js +22 -0
- package/dist/src/httpServer/handlerTypes.js.map +1 -0
- package/dist/src/httpServer/middleware.d.ts +74 -0
- package/dist/src/httpServer/middleware.d.ts.map +1 -0
- package/dist/src/httpServer/middleware.js +108 -0
- package/dist/src/httpServer/middleware.js.map +1 -0
- package/dist/src/{adminserver.d.ts → httpServer/server.d.ts} +15 -6
- package/dist/src/httpServer/server.d.ts.map +1 -0
- package/dist/src/{adminserver.js → httpServer/server.js} +311 -28
- package/dist/src/httpServer/server.js.map +1 -0
- package/dist/src/index.d.ts +12 -3
- package/dist/src/index.d.ts.map +1 -1
- package/dist/src/index.js +40 -1
- package/dist/src/index.js.map +1 -1
- package/dist/src/paramdecorators.d.ts +39 -0
- package/dist/src/paramdecorators.d.ts.map +1 -0
- package/dist/src/paramdecorators.js +365 -0
- package/dist/src/paramdecorators.js.map +1 -0
- package/dist/src/procedure.d.ts +18 -0
- package/dist/src/procedure.d.ts.map +1 -0
- package/dist/src/procedure.js +3 -0
- package/dist/src/procedure.js.map +1 -0
- package/dist/src/transaction.d.ts +19 -0
- package/dist/src/transaction.d.ts.map +1 -0
- package/dist/src/transaction.js +11 -0
- package/dist/src/transaction.js.map +1 -0
- package/dist/src/user_database.d.ts +176 -0
- package/dist/src/user_database.d.ts.map +1 -0
- package/dist/src/user_database.js +540 -0
- package/dist/src/user_database.js.map +1 -0
- package/dist/src/utils.d.ts +22 -1
- package/dist/src/utils.d.ts.map +1 -1
- package/dist/src/utils.js +172 -20
- package/dist/src/utils.js.map +1 -1
- package/dist/src/wfqueue.d.ts +2 -0
- package/dist/src/wfqueue.d.ts.map +1 -1
- package/dist/src/wfqueue.js +1 -1
- package/dist/src/wfqueue.js.map +1 -1
- package/dist/src/workflow.d.ts +8 -0
- package/dist/src/workflow.d.ts.map +1 -1
- package/dist/src/workflow.js +11 -0
- package/dist/src/workflow.js.map +1 -1
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +28 -4
- package/dist/src/adminserver.d.ts.map +0 -1
- package/dist/src/adminserver.js.map +0 -1
- package/dist/src/cli/cli.d.ts.map +0 -1
- package/dist/src/cli/cli.js.map +0 -1
- package/dist/src/cli/commands.d.ts.map +0 -1
- package/dist/src/cli/commands.js.map +0 -1
- package/dist/src/cli/docker_pg_helper.d.ts.map +0 -1
- package/dist/src/cli/docker_pg_helper.js.map +0 -1
- package/dist/src/cli/migrate.d.ts +0 -3
- package/dist/src/cli/migrate.d.ts.map +0 -1
- package/dist/src/cli/migrate.js.map +0 -1
- package/dist/src/config.d.ts.map +0 -1
- package/dist/src/config.js.map +0 -1
- package/dist/src/workflow_management.d.ts.map +0 -1
- package/dist/src/workflow_management.js.map +0 -1
- /package/dist/src/{cli → dbos-runtime}/commands.d.ts +0 -0
- /package/dist/src/{cli → dbos-runtime}/commands.js +0 -0
- /package/dist/src/{cli → dbos-runtime}/docker_pg_helper.d.ts +0 -0
- /package/dist/src/{cli → dbos-runtime}/docker_pg_helper.js +0 -0
@@ -22,27 +22,38 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|
22
22
|
__setModuleDefault(result, mod);
|
23
23
|
return result;
|
24
24
|
};
|
25
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
26
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
27
|
+
};
|
25
28
|
Object.defineProperty(exports, "__esModule", { value: true });
|
26
29
|
exports.DBOSExecutor = exports.TempWorkflowType = exports.OperationType = exports.DBOS_QUEUE_MAX_PRIORITY = exports.DBOS_QUEUE_MIN_PRIORITY = void 0;
|
27
30
|
const error_1 = require("./error");
|
28
31
|
const workflow_1 = require("./workflow");
|
32
|
+
const transaction_1 = require("./transaction");
|
29
33
|
const collector_1 = require("./telemetry/collector");
|
30
34
|
const traces_1 = require("./telemetry/traces");
|
31
35
|
const logs_1 = require("./telemetry/logs");
|
32
36
|
const exporters_1 = require("./telemetry/exporters");
|
37
|
+
const pg_1 = require("pg");
|
33
38
|
const system_database_1 = require("./system_database");
|
34
39
|
const node_crypto_1 = require("node:crypto");
|
40
|
+
const user_database_1 = require("./user_database");
|
35
41
|
const decorators_1 = require("./decorators");
|
36
42
|
const api_1 = require("@opentelemetry/api");
|
43
|
+
const knex_1 = __importDefault(require("knex"));
|
37
44
|
const context_1 = require("./context");
|
38
45
|
const serialize_error_1 = require("serialize-error");
|
39
46
|
const utils_1 = require("./utils");
|
47
|
+
const node_path_1 = __importDefault(require("node:path"));
|
48
|
+
const node_fs_1 = __importDefault(require("node:fs"));
|
49
|
+
const url_1 = require("url");
|
40
50
|
const _1 = require(".");
|
41
51
|
const wfqueue_1 = require("./wfqueue");
|
42
52
|
const debugpoint_1 = require("./debugpoint");
|
43
53
|
const scheduler_1 = require("./scheduler/scheduler");
|
44
54
|
const crypto = __importStar(require("crypto"));
|
45
|
-
const workflow_management_1 = require("./workflow_management");
|
55
|
+
const workflow_management_1 = require("./dbos-runtime/workflow_management");
|
56
|
+
const utils_2 = require("./utils");
|
46
57
|
const database_utils_1 = require("./database_utils");
|
47
58
|
const debouncer_1 = require("./debouncer");
|
48
59
|
const dbosNull = {};
|
@@ -53,14 +64,20 @@ exports.OperationType = {
|
|
53
64
|
WORKFLOW: 'workflow',
|
54
65
|
TRANSACTION: 'transaction',
|
55
66
|
STEP: 'step',
|
67
|
+
PROCEDURE: 'procedure',
|
56
68
|
};
|
57
69
|
exports.TempWorkflowType = {
|
70
|
+
transaction: 'transaction',
|
71
|
+
procedure: 'procedure',
|
58
72
|
step: 'step',
|
59
73
|
send: 'send',
|
60
74
|
};
|
61
75
|
class DBOSExecutor {
|
62
76
|
config;
|
63
77
|
initialized;
|
78
|
+
// User Database
|
79
|
+
#userDatabase = undefined;
|
80
|
+
#procedurePool = undefined;
|
64
81
|
// System Database
|
65
82
|
systemDatabase;
|
66
83
|
// Temporary workflows are created by calling transaction/send/recv directly from the executor class
|
@@ -72,9 +89,42 @@ class DBOSExecutor {
|
|
72
89
|
logger;
|
73
90
|
ctxLogger;
|
74
91
|
tracer;
|
92
|
+
// eslint-disable-next-line @typescript-eslint/no-unsafe-function-type
|
93
|
+
#typeormEntities = [];
|
94
|
+
#drizzleEntities = {};
|
95
|
+
#scheduler = new scheduler_1.ScheduledReceiver();
|
75
96
|
#wfqEnded = undefined;
|
76
97
|
executorID = utils_1.globalParams.executorID;
|
77
98
|
static globalInstance = undefined;
|
99
|
+
static async loadClasses(entrypoints) {
|
100
|
+
const allClasses = [];
|
101
|
+
for (const entrypoint of entrypoints) {
|
102
|
+
const operations = node_path_1.default.isAbsolute(entrypoint) ? entrypoint : node_path_1.default.join(process.cwd(), entrypoint);
|
103
|
+
let exports;
|
104
|
+
if (node_fs_1.default.existsSync(operations)) {
|
105
|
+
const operationsURL = (0, url_1.pathToFileURL)(operations).href;
|
106
|
+
exports = (await import(operationsURL));
|
107
|
+
}
|
108
|
+
else {
|
109
|
+
throw new error_1.DBOSFailLoadOperationsError(`Failed to load operations from the entrypoint ${entrypoint}`);
|
110
|
+
}
|
111
|
+
const classes = [];
|
112
|
+
for (const key in exports) {
|
113
|
+
const $export = exports[key];
|
114
|
+
if (isObject($export)) {
|
115
|
+
classes.push($export);
|
116
|
+
}
|
117
|
+
}
|
118
|
+
allClasses.push(...classes);
|
119
|
+
}
|
120
|
+
if (allClasses.length === 0) {
|
121
|
+
throw new error_1.DBOSFailLoadOperationsError('operations not found');
|
122
|
+
}
|
123
|
+
return allClasses;
|
124
|
+
function isObject(value) {
|
125
|
+
return typeof value === 'function' || (typeof value === 'object' && value !== null);
|
126
|
+
}
|
127
|
+
}
|
78
128
|
/* WORKFLOW EXECUTOR LIFE CYCLE MANAGEMENT */
|
79
129
|
constructor(config, { systemDatabase, debugMode } = {}) {
|
80
130
|
this.config = config;
|
@@ -93,6 +143,7 @@ class DBOSExecutor {
|
|
93
143
|
if (this.#debugMode) {
|
94
144
|
this.logger.info('Running in debug mode!');
|
95
145
|
}
|
146
|
+
this.#procedurePool = this.config.userDbClient ? new pg_1.Pool((0, utils_2.getClientConfig)(this.config.databaseUrl)) : undefined;
|
96
147
|
if (systemDatabase) {
|
97
148
|
this.logger.debug('Using provided system database'); // XXX print the name or something
|
98
149
|
this.systemDatabase = systemDatabase;
|
@@ -101,19 +152,129 @@ class DBOSExecutor {
|
|
101
152
|
this.logger.debug('Using Postgres system database');
|
102
153
|
this.systemDatabase = new system_database_1.PostgresSystemDatabase(this.config.systemDatabaseUrl, this.logger, this.config.sysDbPoolSize);
|
103
154
|
}
|
104
|
-
new scheduler_1.ScheduledReceiver(); // Create the scheduler, which registers itself.
|
105
155
|
this.initialized = false;
|
106
156
|
DBOSExecutor.globalInstance = this;
|
107
157
|
}
|
108
158
|
get appName() {
|
109
159
|
return this.config.name;
|
110
160
|
}
|
111
|
-
|
161
|
+
#configureDbClient() {
|
162
|
+
const userDbClient = this.config.userDbClient;
|
163
|
+
const userDBConfig = (0, utils_2.getClientConfig)(this.config.databaseUrl);
|
164
|
+
userDBConfig.max = this.config.userDbPoolSize ?? 20;
|
165
|
+
if (userDbClient === user_database_1.UserDatabaseName.PRISMA) {
|
166
|
+
// TODO: make Prisma work with debugger proxy.
|
167
|
+
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-require-imports
|
168
|
+
const { PrismaClient } = require(node_path_1.default.join(process.cwd(), 'node_modules', '@prisma', 'client')); // Find the prisma client in the node_modules of the current project
|
169
|
+
this.#userDatabase = new user_database_1.PrismaUserDatabase(
|
170
|
+
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-unsafe-call
|
171
|
+
new PrismaClient({
|
172
|
+
datasources: {
|
173
|
+
db: {
|
174
|
+
url: userDBConfig.connectionString,
|
175
|
+
},
|
176
|
+
},
|
177
|
+
}));
|
178
|
+
this.logger.debug('Loaded Prisma user database');
|
179
|
+
}
|
180
|
+
else if (userDbClient === user_database_1.UserDatabaseName.TYPEORM) {
|
181
|
+
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-require-imports
|
182
|
+
const DataSourceExports = require('typeorm');
|
183
|
+
try {
|
184
|
+
this.#userDatabase = new user_database_1.TypeORMDatabase(
|
185
|
+
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access
|
186
|
+
new DataSourceExports.DataSource({
|
187
|
+
type: 'postgres',
|
188
|
+
url: userDBConfig.connectionString,
|
189
|
+
connectTimeoutMS: userDBConfig.connectionTimeoutMillis,
|
190
|
+
entities: this.#typeormEntities,
|
191
|
+
poolSize: userDBConfig.max,
|
192
|
+
}));
|
193
|
+
}
|
194
|
+
catch (s) {
|
195
|
+
s.message = `Error loading TypeORM user database: ${s.message}`;
|
196
|
+
this.logger.error(s);
|
197
|
+
}
|
198
|
+
this.logger.debug('Loaded TypeORM user database');
|
199
|
+
}
|
200
|
+
else if (userDbClient === user_database_1.UserDatabaseName.KNEX) {
|
201
|
+
const knexConfig = {
|
202
|
+
client: 'postgres',
|
203
|
+
connection: (0, utils_2.getClientConfig)(this.config.databaseUrl),
|
204
|
+
pool: {
|
205
|
+
min: 0,
|
206
|
+
max: userDBConfig.max,
|
207
|
+
},
|
208
|
+
};
|
209
|
+
this.#userDatabase = new user_database_1.KnexUserDatabase((0, knex_1.default)(knexConfig));
|
210
|
+
this.logger.debug('Loaded Knex user database');
|
211
|
+
}
|
212
|
+
else if (userDbClient === user_database_1.UserDatabaseName.DRIZZLE) {
|
213
|
+
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-require-imports
|
214
|
+
const DrizzleExports = require('drizzle-orm/node-postgres');
|
215
|
+
const drizzlePool = new pg_1.Pool(userDBConfig);
|
216
|
+
// eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access
|
217
|
+
const drizzle = DrizzleExports.drizzle(drizzlePool, { schema: this.#drizzleEntities });
|
218
|
+
// eslint-disable-next-line @typescript-eslint/no-unsafe-argument
|
219
|
+
this.#userDatabase = new user_database_1.DrizzleUserDatabase(drizzlePool, drizzle);
|
220
|
+
this.logger.debug('Loaded Drizzle user database');
|
221
|
+
}
|
222
|
+
else {
|
223
|
+
this.#userDatabase = new user_database_1.PGNodeUserDatabase(userDBConfig);
|
224
|
+
this.logger.debug('Loaded Postgres user database');
|
225
|
+
}
|
226
|
+
}
|
227
|
+
async init(classes) {
|
112
228
|
if (this.initialized) {
|
113
229
|
this.logger.error('Workflow executor already initialized!');
|
114
230
|
return;
|
115
231
|
}
|
232
|
+
let classnames = [];
|
233
|
+
if (!classes || !classes.length) {
|
234
|
+
classnames = (0, decorators_1.getAllRegisteredClassNames)();
|
235
|
+
}
|
236
|
+
else {
|
237
|
+
classnames = classes.map((c) => (0, decorators_1.getNameForClass)(c));
|
238
|
+
}
|
116
239
|
try {
|
240
|
+
let length; // Track the length of the array (or number of keys of the object)
|
241
|
+
for (const clsname of classnames) {
|
242
|
+
const reg = (0, decorators_1.getClassRegistrationByName)(clsname);
|
243
|
+
/**
|
244
|
+
* With TSORM, we take an array of entities (Function[]) and add them to this.entities:
|
245
|
+
*/
|
246
|
+
if (Array.isArray(reg.ormEntities)) {
|
247
|
+
this.#typeormEntities = this.#typeormEntities.concat(reg.ormEntities);
|
248
|
+
length = reg.ormEntities.length;
|
249
|
+
}
|
250
|
+
else {
|
251
|
+
/**
|
252
|
+
* With Drizzle, we need to take an object of entities, since the object keys are used to access the entities from ctx.client.query:
|
253
|
+
*/
|
254
|
+
this.#drizzleEntities = { ...this.#drizzleEntities, ...reg.ormEntities };
|
255
|
+
length = Object.keys(reg.ormEntities).length;
|
256
|
+
}
|
257
|
+
this.logger.debug(`Loaded ${length} ORM entities`);
|
258
|
+
}
|
259
|
+
if (this.config.userDbClient) {
|
260
|
+
if (!this.#debugMode) {
|
261
|
+
const res = await (0, database_utils_1.ensurePGDatabase)({
|
262
|
+
urlToEnsure: this.config.databaseUrl,
|
263
|
+
logger: (msg) => this.logger.debug(msg),
|
264
|
+
});
|
265
|
+
if (res.status === 'failed') {
|
266
|
+
this.logger.warn(`Application database could not be verified / created: ${(0, database_utils_1.maskDatabaseUrl)(this.config.databaseUrl)}: ${res.message} ${res.hint ?? ''}\n ${res.notes.join('\n')}`);
|
267
|
+
}
|
268
|
+
}
|
269
|
+
this.#configureDbClient();
|
270
|
+
if (!this.#userDatabase) {
|
271
|
+
this.logger.error('No user database configured!');
|
272
|
+
throw new error_1.DBOSInitializationError('No user database configured!');
|
273
|
+
}
|
274
|
+
// Debug mode doesn't need to initialize the DBs. Everything should appear to be read-only.
|
275
|
+
await this.#userDatabase.init(this.#debugMode);
|
276
|
+
}
|
277
|
+
// Debug mode doesn't initialize the sys db
|
117
278
|
await this.systemDatabase.init(this.#debugMode);
|
118
279
|
}
|
119
280
|
catch (err) {
|
@@ -138,6 +299,20 @@ class DBOSExecutor {
|
|
138
299
|
this.initialized = true;
|
139
300
|
// Only execute init code if under non-debug mode
|
140
301
|
if (!this.#debugMode) {
|
302
|
+
for (const cls of classnames) {
|
303
|
+
// Init its configurations
|
304
|
+
const creg = (0, decorators_1.getClassRegistrationByName)(cls);
|
305
|
+
for (const [_cfgname, cfg] of creg.configuredInstances) {
|
306
|
+
await cfg.initialize(new _1.InitContext());
|
307
|
+
}
|
308
|
+
}
|
309
|
+
for (const v of (0, decorators_1.getAllRegisteredFunctions)()) {
|
310
|
+
const m = v;
|
311
|
+
if (m.init === true) {
|
312
|
+
this.logger.debug('Executing init method: ' + m.name);
|
313
|
+
await m.origFunction(new _1.InitContext());
|
314
|
+
}
|
315
|
+
}
|
141
316
|
// Compute the application version if not provided
|
142
317
|
if (utils_1.globalParams.appVersion === '') {
|
143
318
|
utils_1.globalParams.appVersion = this.computeAppVersion();
|
@@ -151,10 +326,34 @@ class DBOSExecutor {
|
|
151
326
|
}
|
152
327
|
this.logger.info('DBOS launched!');
|
153
328
|
}
|
329
|
+
#logNotice(msg) {
|
330
|
+
switch (msg.severity) {
|
331
|
+
case 'INFO':
|
332
|
+
case 'LOG':
|
333
|
+
case 'NOTICE':
|
334
|
+
this.logger.info(msg.message);
|
335
|
+
break;
|
336
|
+
case 'WARNING':
|
337
|
+
this.logger.warn(msg.message);
|
338
|
+
break;
|
339
|
+
case 'DEBUG':
|
340
|
+
this.logger.debug(msg.message);
|
341
|
+
break;
|
342
|
+
case 'ERROR':
|
343
|
+
case 'FATAL':
|
344
|
+
case 'PANIC':
|
345
|
+
this.logger.error(msg.message);
|
346
|
+
break;
|
347
|
+
default:
|
348
|
+
this.logger.error(`Unknown notice severity: ${msg.severity} - ${msg.message}`);
|
349
|
+
}
|
350
|
+
}
|
154
351
|
async destroy() {
|
155
352
|
try {
|
156
353
|
await this.systemDatabase.awaitRunningWorkflows();
|
157
354
|
await this.systemDatabase.destroy();
|
355
|
+
await this.#userDatabase?.destroy();
|
356
|
+
await this.#procedurePool?.end();
|
158
357
|
await this.logger.destroy();
|
159
358
|
if (DBOSExecutor.globalInstance === this) {
|
160
359
|
DBOSExecutor.globalInstance = undefined;
|
@@ -166,6 +365,24 @@ class DBOSExecutor {
|
|
166
365
|
throw err;
|
167
366
|
}
|
168
367
|
}
|
368
|
+
async createUserSchema() {
|
369
|
+
if (!this.#userDatabase) {
|
370
|
+
throw new Error('User database not enabled.');
|
371
|
+
}
|
372
|
+
await this.#userDatabase.createSchema();
|
373
|
+
}
|
374
|
+
async dropUserSchema() {
|
375
|
+
if (!this.#userDatabase) {
|
376
|
+
throw new Error('User database not enabled.');
|
377
|
+
}
|
378
|
+
await this.#userDatabase.dropSchema();
|
379
|
+
}
|
380
|
+
async queryUserDbFunction(queryFunction, ...params) {
|
381
|
+
if (!this.#userDatabase) {
|
382
|
+
throw new Error('User database not enabled.');
|
383
|
+
}
|
384
|
+
return await this.#userDatabase.queryFunction(queryFunction, ...params);
|
385
|
+
}
|
169
386
|
// This could return WF, or the function underlying a temp wf
|
170
387
|
#getFunctionInfoFromWFStatus(wf) {
|
171
388
|
const methReg = (0, decorators_1.getFunctionRegistrationByName)(wf.workflowClassName, wf.workflowName);
|
@@ -203,7 +420,7 @@ class DBOSExecutor {
|
|
203
420
|
if (params.queueName) {
|
204
421
|
const wfqueue = this.#getQueueByName(params.queueName);
|
205
422
|
if (!wfqueue.priorityEnabled && priority !== undefined) {
|
206
|
-
|
423
|
+
throw Error(`Priority is not enabled for queue ${params.queueName}. Setting priority will not have any effect.`);
|
207
424
|
}
|
208
425
|
}
|
209
426
|
const pctx = { ...(0, context_1.getCurrentContextStore)() }; // function ID was already incremented...
|
@@ -229,6 +446,8 @@ class DBOSExecutor {
|
|
229
446
|
assumedRole: pctx?.assumedRole ?? '',
|
230
447
|
});
|
231
448
|
const isTempWorkflow = DBOSExecutor.#tempWorkflowName === wfname;
|
449
|
+
const funcArgs = (0, utils_1.serializeFunctionInputOutput)(args, [wfname, '<arguments>']);
|
450
|
+
args = funcArgs.deserialized;
|
232
451
|
const internalStatus = {
|
233
452
|
workflowUUID: workflowID,
|
234
453
|
status: params.queueName !== undefined ? workflow_1.StatusString.ENQUEUED : workflow_1.StatusString.PENDING,
|
@@ -248,7 +467,7 @@ class DBOSExecutor {
|
|
248
467
|
createdAt: Date.now(), // Remember the start time of this workflow,
|
249
468
|
timeoutMS: timeoutMS,
|
250
469
|
deadlineEpochMS: deadlineEpochMS,
|
251
|
-
input:
|
470
|
+
input: funcArgs.stringified,
|
252
471
|
deduplicationID: params.enqueueOptions?.deduplicationID,
|
253
472
|
priority: priority ?? 0,
|
254
473
|
};
|
@@ -266,8 +485,8 @@ class DBOSExecutor {
|
|
266
485
|
throw new error_1.DBOSDebuggerError(`Failed to find inputs for workflow UUID ${workflowID}`);
|
267
486
|
}
|
268
487
|
// Make sure we use the same input.
|
269
|
-
if (
|
270
|
-
throw new error_1.DBOSDebuggerError(`Detected different inputs for workflow UUID ${workflowID}.\n Received: ${
|
488
|
+
if (funcArgs.stringified !== wfStatus.input) {
|
489
|
+
throw new error_1.DBOSDebuggerError(`Detected different inputs for workflow UUID ${workflowID}.\n Received: ${funcArgs.stringified}\n Original: ${wfStatus.input}`);
|
271
490
|
}
|
272
491
|
status = wfStatus.status;
|
273
492
|
}
|
@@ -356,6 +575,12 @@ class DBOSExecutor {
|
|
356
575
|
});
|
357
576
|
});
|
358
577
|
if (this.#debugMode) {
|
578
|
+
function resultsMatch(recordedResult, callResult) {
|
579
|
+
if (recordedResult === null) {
|
580
|
+
return callResult === undefined || callResult === null;
|
581
|
+
}
|
582
|
+
return utils_1.DBOSJSON.stringify(recordedResult) === utils_1.DBOSJSON.stringify(callResult);
|
583
|
+
}
|
359
584
|
const recordedResult = DBOSExecutor.reviveResultOrError((await this.systemDatabase.awaitWorkflowResult(workflowID)));
|
360
585
|
if (!resultsMatch(recordedResult, callResult)) {
|
361
586
|
this.logger.error(`Detect different output for the workflow UUID ${workflowID}!\n Received: ${utils_1.DBOSJSON.stringify(callResult)}\n Original: ${utils_1.DBOSJSON.stringify(recordedResult)}`);
|
@@ -365,13 +590,9 @@ class DBOSExecutor {
|
|
365
590
|
else {
|
366
591
|
result = callResult;
|
367
592
|
}
|
368
|
-
|
369
|
-
|
370
|
-
|
371
|
-
}
|
372
|
-
return utils_1.DBOSJSON.stringify(recordedResult) === utils_1.DBOSJSON.stringify(callResult);
|
373
|
-
}
|
374
|
-
internalStatus.output = utils_1.DBOSJSON.stringify(result);
|
593
|
+
const funcResult = (0, utils_1.serializeFunctionInputOutput)(result, [wfname, '<result>']);
|
594
|
+
result = funcResult.deserialized;
|
595
|
+
internalStatus.output = funcResult.stringified;
|
375
596
|
internalStatus.status = workflow_1.StatusString.SUCCESS;
|
376
597
|
if (!this.#debugMode) {
|
377
598
|
await this.systemDatabase.recordWorkflowOutput(workflowID, internalStatus);
|
@@ -426,6 +647,510 @@ class DBOSExecutor {
|
|
426
647
|
throw new error_1.DBOSNotRegisteredError(name, `Workflow queue '${name}' is not defined.`);
|
427
648
|
return q;
|
428
649
|
}
|
650
|
+
/**
|
651
|
+
* Retrieve the transaction snapshot information of the current transaction
|
652
|
+
*/
|
653
|
+
static async #retrieveSnapshot(query) {
|
654
|
+
const rows = await query('SELECT pg_current_snapshot()::text as txn_snapshot;', []);
|
655
|
+
return rows[0].txn_snapshot;
|
656
|
+
}
|
657
|
+
/**
|
658
|
+
* Check if an operation has already executed in a workflow.
|
659
|
+
* If it previously executed successfully, return its output.
|
660
|
+
* If it previously executed and threw an error, return that error.
|
661
|
+
* Otherwise, return DBOSNull.
|
662
|
+
* Also return the transaction snapshot and id information of the original or current transaction.
|
663
|
+
*/
|
664
|
+
async #checkExecution(query, workflowUUID, funcID, funcName) {
|
665
|
+
const rows = await query(`(SELECT output, error, txn_snapshot, txn_id, function_name, true as recorded
|
666
|
+
FROM dbos.transaction_outputs
|
667
|
+
WHERE workflow_uuid=$1 AND function_id=$2
|
668
|
+
UNION ALL
|
669
|
+
SELECT null as output, null as error, pg_current_snapshot()::text as txn_snapshot,
|
670
|
+
null as txn_id, '' as function_name, false as recorded
|
671
|
+
) ORDER BY recorded`, [workflowUUID, funcID]);
|
672
|
+
if (rows.length === 0 || rows.length > 2) {
|
673
|
+
const returnedRows = JSON.stringify(rows);
|
674
|
+
this.logger.error('Unexpected! This should never happen. Returned rows: ' + returnedRows);
|
675
|
+
throw new error_1.DBOSError('This should never happen. Returned rows: ' + returnedRows);
|
676
|
+
}
|
677
|
+
if (rows.length === 2) {
|
678
|
+
if (rows[1].function_name !== funcName) {
|
679
|
+
throw new error_1.DBOSUnexpectedStepError(workflowUUID, funcID, funcName, rows[0].function_name);
|
680
|
+
}
|
681
|
+
const { txn_snapshot, txn_id } = rows[1];
|
682
|
+
const error = utils_1.DBOSJSON.parse(rows[1].error);
|
683
|
+
if (error) {
|
684
|
+
return { result: (0, serialize_error_1.deserializeError)(error), txn_snapshot, txn_id: txn_id ?? undefined };
|
685
|
+
}
|
686
|
+
else {
|
687
|
+
return { result: utils_1.DBOSJSON.parse(rows[1].output), txn_snapshot, txn_id: txn_id ?? undefined };
|
688
|
+
}
|
689
|
+
}
|
690
|
+
else {
|
691
|
+
const { txn_snapshot } = rows[0];
|
692
|
+
return { result: dbosNull, txn_snapshot, txn_id: undefined };
|
693
|
+
}
|
694
|
+
}
|
695
|
+
/**
|
696
|
+
* Write a operation's output to the database.
|
697
|
+
*/
|
698
|
+
async #recordOutput(query, workflowUUID, funcID, txnSnapshot, serialOutput, isKeyConflict, function_name) {
|
699
|
+
if (this.#debugMode) {
|
700
|
+
throw new error_1.DBOSDebuggerError('Cannot record output in debug mode.');
|
701
|
+
}
|
702
|
+
try {
|
703
|
+
const rows = await query('INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, output, txn_id, txn_snapshot, created_at, function_name) VALUES ($1, $2, $3, (select pg_current_xact_id_if_assigned()::text), $4, $5, $6) RETURNING txn_id;', [workflowUUID, funcID, serialOutput, txnSnapshot, Date.now(), function_name]);
|
704
|
+
return rows[0].txn_id;
|
705
|
+
}
|
706
|
+
catch (error) {
|
707
|
+
if (isKeyConflict(error)) {
|
708
|
+
// Serialization and primary key conflict (Postgres).
|
709
|
+
throw new error_1.DBOSWorkflowConflictError(workflowUUID);
|
710
|
+
}
|
711
|
+
else {
|
712
|
+
throw error;
|
713
|
+
}
|
714
|
+
}
|
715
|
+
}
|
716
|
+
/**
|
717
|
+
* Record an error in an operation to the database.
|
718
|
+
*/
|
719
|
+
async #recordError(query, workflowUUID, funcID, txnSnapshot, err, isKeyConflict, function_name) {
|
720
|
+
if (this.#debugMode) {
|
721
|
+
throw new error_1.DBOSDebuggerError('Cannot record error in debug mode.');
|
722
|
+
}
|
723
|
+
try {
|
724
|
+
const serialErr = utils_1.DBOSJSON.stringify((0, serialize_error_1.serializeError)(err));
|
725
|
+
await query('INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, error, txn_id, txn_snapshot, created_at, function_name) VALUES ($1, $2, $3, null, $4, $5, $6) RETURNING txn_id;', [workflowUUID, funcID, serialErr, txnSnapshot, Date.now(), function_name]);
|
726
|
+
}
|
727
|
+
catch (error) {
|
728
|
+
if (isKeyConflict(error)) {
|
729
|
+
// Serialization and primary key conflict (Postgres).
|
730
|
+
throw new error_1.DBOSWorkflowConflictError(workflowUUID);
|
731
|
+
}
|
732
|
+
else {
|
733
|
+
throw error;
|
734
|
+
}
|
735
|
+
}
|
736
|
+
}
|
737
|
+
async getTransactions(workflowUUID) {
|
738
|
+
if (this.#userDatabase) {
|
739
|
+
const rows = await this.#userDatabase.query(`SELECT function_id, function_name, output, error FROM ${DBOSExecutor.systemDBSchemaName}.transaction_outputs WHERE workflow_uuid=$1`, workflowUUID);
|
740
|
+
for (const row of rows) {
|
741
|
+
row.output = row.output !== null ? utils_1.DBOSJSON.parse(row.output) : null;
|
742
|
+
row.error = row.error !== null ? (0, serialize_error_1.deserializeError)(utils_1.DBOSJSON.parse(row.error)) : null;
|
743
|
+
}
|
744
|
+
return rows;
|
745
|
+
}
|
746
|
+
else {
|
747
|
+
return [];
|
748
|
+
}
|
749
|
+
}
|
750
|
+
async runTransactionTempWF(txn, params, ...args) {
|
751
|
+
return await (await this.startTransactionTempWF(txn, params, undefined, undefined, ...args)).getResult();
|
752
|
+
}
|
753
|
+
async startTransactionTempWF(txn, params, callerWFID, callerFunctionID, ...args) {
|
754
|
+
// Create a workflow and call transaction.
|
755
|
+
const temp_workflow = async (...args) => {
|
756
|
+
return await this.callTransactionFunction(txn, params.configuredInstance ?? null, ...args);
|
757
|
+
};
|
758
|
+
return await this.internalWorkflow(temp_workflow, {
|
759
|
+
...params,
|
760
|
+
tempWfType: exports.TempWorkflowType.transaction,
|
761
|
+
tempWfName: (0, decorators_1.getRegisteredFunctionName)(txn),
|
762
|
+
tempWfClass: (0, decorators_1.getRegisteredFunctionClassName)(txn),
|
763
|
+
}, callerWFID, callerFunctionID, ...args);
|
764
|
+
}
|
765
|
+
async callTransactionFunction(txn, clsinst, ...args) {
    // Run a registered transaction function as one durable step of the current
    // workflow: reuse a previously recorded result if this step already ran,
    // otherwise execute the user code inside a database transaction, record the
    // output (or error), and retry automatically on serialization failures.
    const userDB = this.#userDatabase;
    if (!userDB) {
        throw new Error('No user database configured for transactions.');
    }
    const txnReg = (0, decorators_1.getFunctionRegistration)(txn);
    if (!txnReg || !txnReg.txnConfig) {
        throw new error_1.DBOSNotRegisteredError(txn.name);
    }
    // Each step of a workflow gets a monotonically increasing function ID.
    const funcId = (0, context_1.functionIDGetIncrement)();
    const pctx = { ...(0, context_1.getCurrentContextStore)() };
    const wfid = pctx.workflowId;
    await this.systemDatabase.checkIfCanceled(wfid);
    // Exponential backoff parameters for retriable (serialization) failures.
    let retryWaitMillis = 1;
    const backoffFactor = 1.5;
    const maxRetryWaitMs = 2000; // Maximum wait 2 seconds.
    const span = this.tracer.startSpan(txn.name, {
        operationUUID: wfid,
        operationType: exports.OperationType.TRANSACTION,
        operationName: txn.name,
        authenticatedUser: pctx.authenticatedUser ?? '',
        assumedRole: pctx.assumedRole ?? '',
        authenticatedRoles: pctx.authenticatedRoles ?? [],
        isolationLevel: txnReg.txnConfig.isolationLevel,
    });
    while (true) {
        await this.systemDatabase.checkIfCanceled(wfid);
        let txn_snapshot = 'invalid';
        let prevResultFound = false;
        const wrappedTransaction = async (client) => {
            // If the UUID is preset, it is possible this execution previously happened. Check, and return its original result if it did.
            // Note: It is possible to retrieve a generated ID from a workflow handle, run a concurrent execution, and cause trouble for yourself. We recommend against this.
            let prevResult = dbosNull;
            const queryFunc = (sql, args) => userDB.queryWithClient(client, sql, ...args);
            if (pctx.presetID) {
                const executionResult = await this.#checkExecution(queryFunc, wfid, funcId, txn.name);
                prevResult = executionResult.result;
                txn_snapshot = executionResult.txn_snapshot;
                if (prevResult !== dbosNull) {
                    prevResultFound = true;
                    span.setAttribute('cached', true);
                    // Return/throw the previous result
                    if (prevResult instanceof Error) {
                        throw prevResult;
                    }
                    else {
                        return prevResult;
                    }
                }
            }
            else {
                // Collect snapshot information for read-only transactions and non-preset UUID transactions, if not already collected above
                txn_snapshot = await DBOSExecutor.#retrieveSnapshot(queryFunc);
            }
            // In debug (replay) mode a recorded output must already exist.
            if (this.#debugMode && prevResult === dbosNull) {
                throw new error_1.DBOSDebuggerError(`Failed to find the recorded output for the transaction: workflow UUID ${wfid}, step number ${funcId}`);
            }
            // Execute the user's transaction.
            const ctxlog = this.ctxLogger;
            // Errors thrown by the user function are captured as values here so
            // they can be compared against the recorded result (debug mode) or
            // recorded before being rethrown below.
            const result = await (async function () {
                try {
                    return await api_1.context.with(api_1.trace.setSpan(api_1.context.active(), span), async () => {
                        return await (0, context_1.runWithParentContext)(pctx, {
                            authenticatedRoles: pctx?.authenticatedRoles,
                            authenticatedUser: pctx?.authenticatedUser,
                            workflowId: wfid,
                            curTxFunctionId: funcId,
                            parentCtx: pctx,
                            sqlClient: client,
                            logger: ctxlog,
                        }, async () => {
                            const tf = txn;
                            return await tf.call(clsinst, ...args);
                        });
                    });
                }
                catch (e) {
                    return e instanceof Error ? e : new Error(`${e}`);
                }
            })();
            if (this.#debugMode) {
                // Replay mode: compare the fresh output with the recorded one and
                // always return the recorded result.
                if (prevResult instanceof Error) {
                    throw prevResult;
                }
                const prevResultJson = utils_1.DBOSJSON.stringify(prevResult);
                const resultJson = utils_1.DBOSJSON.stringify(result);
                if (prevResultJson !== resultJson) {
                    this.logger.error(`Detected different transaction output than the original one!\n Result: ${resultJson}\n Original: ${utils_1.DBOSJSON.stringify(prevResultJson)}`);
                }
                return prevResult;
            }
            if (result instanceof Error) {
                throw result;
            }
            // Record the execution, commit, and return.
            const funcOutput = (0, utils_1.serializeFunctionInputOutput)(result, [txn.name, '<result>']);
            try {
                // Synchronously record the output of write transactions and obtain the transaction ID.
                const pg_txn_id = await this.#recordOutput(queryFunc, wfid, funcId, txn_snapshot, funcOutput.stringified, (error) => userDB.isKeyConflictError(error), txn.name);
                span.setAttribute('pg_txn_id', pg_txn_id);
            }
            catch (error) {
                if (userDB.isFailedSqlTransactionError(error)) {
                    this.logger.error(`Postgres aborted the ${txn.name} @DBOS.transaction of Workflow ${wfid}, but the function did not raise an exception. Please ensure that the @DBOS.transaction method raises an exception if the database transaction is aborted.`);
                    throw new error_1.DBOSFailedSqlTransactionError(wfid, txn.name);
                }
                else {
                    throw error;
                }
            }
            return funcOutput.deserialized;
        };
        try {
            const result = await userDB.transaction(wrappedTransaction, txnReg.txnConfig);
            span.setStatus({ code: api_1.SpanStatusCode.OK });
            this.tracer.endSpan(span);
            return result;
        }
        catch (err) {
            const e = err;
            // Only retry/record when this was a genuinely new execution (not a
            // replayed result, not debug mode, not a step-mismatch error).
            if (!prevResultFound && !this.#debugMode && !(e instanceof error_1.DBOSUnexpectedStepError)) {
                if (userDB.isRetriableTransactionError(err)) {
                    // serialization_failure in PostgreSQL
                    span.addEvent('TXN SERIALIZATION FAILURE', { retryWaitMillis: retryWaitMillis }, performance.now());
                    // Retry serialization failures.
                    await (0, utils_1.sleepms)(retryWaitMillis);
                    retryWaitMillis *= backoffFactor;
                    retryWaitMillis = retryWaitMillis < maxRetryWaitMs ? retryWaitMillis : maxRetryWaitMs;
                    continue;
                }
                // Record and throw other errors.
                const e = err;
                await userDB.transaction(async (client) => {
                    const func = (sql, args) => userDB.queryWithClient(client, sql, ...args);
                    await this.#recordError(func, wfid, funcId, txn_snapshot, e, (error) => userDB.isKeyConflictError(error), txn.name);
                }, { isolationLevel: transaction_1.IsolationLevel.ReadCommitted });
            }
            span.setStatus({ code: api_1.SpanStatusCode.ERROR, message: e.message });
            this.tracer.endSpan(span);
            throw err;
        }
    }
}
|
908
|
+
async runProcedureTempWF(proc, params, ...args) {
|
909
|
+
// Create a workflow and call procedure.
|
910
|
+
const temp_workflow = async (...args) => {
|
911
|
+
return this.callProcedureFunction(proc, ...args);
|
912
|
+
};
|
913
|
+
return await (await this.workflow(temp_workflow, {
|
914
|
+
...params,
|
915
|
+
tempWfType: exports.TempWorkflowType.procedure,
|
916
|
+
tempWfName: (0, decorators_1.getRegisteredFunctionName)(proc),
|
917
|
+
tempWfClass: (0, decorators_1.getRegisteredFunctionClassName)(proc),
|
918
|
+
}, ...args)).getResult();
|
919
|
+
}
|
920
|
+
async callProcedureFunction(proc, ...args) {
|
921
|
+
const procInfo = (0, decorators_1.getFunctionRegistration)(proc);
|
922
|
+
if (!procInfo || !procInfo.procConfig) {
|
923
|
+
throw new error_1.DBOSNotRegisteredError(proc.name);
|
924
|
+
}
|
925
|
+
const procConfig = procInfo.procConfig;
|
926
|
+
const pctx = (0, context_1.getCurrentContextStore)();
|
927
|
+
const wfid = pctx.workflowId;
|
928
|
+
await this.systemDatabase.checkIfCanceled(wfid);
|
929
|
+
const executeLocally = this.#debugMode || (procConfig.executeLocally ?? false);
|
930
|
+
const funcId = (0, context_1.functionIDGetIncrement)();
|
931
|
+
const span = this.tracer.startSpan(proc.name, {
|
932
|
+
operationUUID: wfid,
|
933
|
+
operationType: exports.OperationType.PROCEDURE,
|
934
|
+
operationName: proc.name,
|
935
|
+
authenticatedUser: pctx.authenticatedUser ?? '',
|
936
|
+
assumedRole: pctx.assumedRole ?? '',
|
937
|
+
authenticatedRoles: pctx.authenticatedRoles ?? [],
|
938
|
+
isolationLevel: procInfo.procConfig.isolationLevel,
|
939
|
+
executeLocally,
|
940
|
+
});
|
941
|
+
try {
|
942
|
+
const result = executeLocally
|
943
|
+
? await this.#callProcedureFunctionLocal(proc, args, span, procInfo, funcId)
|
944
|
+
: await this.#callProcedureFunctionRemote(proc, args, span, procConfig, funcId);
|
945
|
+
span.setStatus({ code: api_1.SpanStatusCode.OK });
|
946
|
+
return result;
|
947
|
+
}
|
948
|
+
catch (e) {
|
949
|
+
const { message } = e;
|
950
|
+
span.setStatus({ code: api_1.SpanStatusCode.ERROR, message });
|
951
|
+
throw e;
|
952
|
+
}
|
953
|
+
finally {
|
954
|
+
this.tracer.endSpan(span);
|
955
|
+
}
|
956
|
+
}
|
957
|
+
async #callProcedureFunctionLocal(proc, args, span, procInfo, funcId) {
    // "Local" stored-procedure execution: run the user's procedure code in this
    // process against the procedure pool (used in debug mode or when
    // executeLocally is configured), with record/replay/retry semantics that
    // mirror callTransactionFunction.
    const procPool = this.#procedurePool;
    const userDB = this.#userDatabase;
    if (!procPool || !userDB) {
        throw new Error('User database not enabled.');
    }
    // Exponential backoff parameters for retriable (serialization) failures.
    let retryWaitMillis = 1;
    const backoffFactor = 1.5;
    const maxRetryWaitMs = 2000; // Maximum wait 2 seconds.
    const pctx = { ...(0, context_1.getCurrentContextStore)() };
    const wfid = pctx.workflowId;
    while (true) {
        await this.systemDatabase.checkIfCanceled(wfid);
        let txn_snapshot = 'invalid';
        const wrappedProcedure = async (client) => {
            // A preset workflow ID may have already executed this step; if so,
            // reuse the recorded result instead of re-running the user code.
            let prevResult = dbosNull;
            const queryFunc = (sql, args) => procPool.query(sql, args).then((v) => v.rows);
            if (pctx.presetID) {
                const executionResult = await this.#checkExecution(queryFunc, wfid, funcId, proc.name);
                prevResult = executionResult.result;
                txn_snapshot = executionResult.txn_snapshot;
                if (prevResult !== dbosNull) {
                    span.setAttribute('cached', true);
                    // Return/throw the previous result
                    if (prevResult instanceof Error) {
                        throw prevResult;
                    }
                    else {
                        return prevResult;
                    }
                }
            }
            else {
                // Collect snapshot information for read-only transactions and non-preset UUID transactions, if not already collected above
                txn_snapshot = await DBOSExecutor.#retrieveSnapshot(queryFunc);
            }
            // In debug (replay) mode a recorded output must already exist.
            if (this.#debugMode && prevResult === dbosNull) {
                throw new error_1.DBOSDebuggerError(`Failed to find the recorded output for the procedure: workflow UUID ${wfid}, step number ${funcId}`);
            }
            // Execute the user's transaction.
            const ctxlog = this.ctxLogger;
            // Thrown errors are captured as values so they can be compared or
            // recorded before being rethrown below.
            const result = await (async function () {
                try {
                    // Check we are in a workflow context and not in a step / transaction already
                    if (!pctx)
                        throw new error_1.DBOSInvalidWorkflowTransitionError();
                    if (!(0, context_1.isInWorkflowCtx)(pctx))
                        throw new error_1.DBOSInvalidWorkflowTransitionError();
                    return await api_1.context.with(api_1.trace.setSpan(api_1.context.active(), span), async () => {
                        return await (0, context_1.runWithParentContext)(pctx, {
                            curTxFunctionId: funcId,
                            parentCtx: pctx,
                            isInStoredProc: true,
                            sqlClient: client,
                            logger: ctxlog,
                        }, async () => {
                            const pf = proc;
                            return await pf(...args);
                        });
                    });
                }
                catch (e) {
                    return e instanceof Error ? e : new Error(`${e}`);
                }
            })();
            if (this.#debugMode) {
                // Replay mode: compare the fresh output with the recorded one and
                // always return the recorded result.
                if (prevResult instanceof Error) {
                    throw prevResult;
                }
                const prevResultJson = utils_1.DBOSJSON.stringify(prevResult);
                const resultJson = utils_1.DBOSJSON.stringify(result);
                if (prevResultJson !== resultJson) {
                    this.logger.error(`Detected different transaction output than the original one!\n Result: ${resultJson}\n Original: ${utils_1.DBOSJSON.stringify(prevResultJson)}`);
                }
                return prevResult;
            }
            if (result instanceof Error) {
                throw result;
            }
            // Synchronously record the output of write transactions and obtain the transaction ID.
            const func = (sql, args) => client.query(sql, args).then((v) => v.rows);
            const funcResult = (0, utils_1.serializeFunctionInputOutput)(result, [proc.name, '<result>']);
            const pg_txn_id = await this.#recordOutput(func, wfid, funcId, txn_snapshot, funcResult.stringified, user_database_1.pgNodeIsKeyConflictError, proc.name);
            // const pg_txn_id = await wfCtx.recordOutputProc<R>(client, funcId, txn_snapshot, result);
            span.setAttribute('pg_txn_id', pg_txn_id);
            return funcResult.deserialized;
        };
        try {
            const result = await this.invokeStoredProcFunction(wrappedProcedure, {
                isolationLevel: procInfo.procConfig.isolationLevel,
            });
            span.setStatus({ code: api_1.SpanStatusCode.OK });
            return result;
        }
        catch (err) {
            if (!this.#debugMode) {
                if (userDB.isRetriableTransactionError(err)) {
                    // serialization_failure in PostgreSQL
                    span.addEvent('TXN SERIALIZATION FAILURE', { retryWaitMillis: retryWaitMillis }, performance.now());
                    // Retry serialization failures.
                    await (0, utils_1.sleepms)(retryWaitMillis);
                    retryWaitMillis *= backoffFactor;
                    retryWaitMillis = retryWaitMillis < maxRetryWaitMs ? retryWaitMillis : maxRetryWaitMs;
                    continue;
                }
                // Record and throw other errors.
                // NOTE(review): the error is recorded twice — once through the
                // procedure pool and once through the user database. Presumably
                // intentional (both stores must see the failure); confirm.
                const e = err;
                await this.invokeStoredProcFunction(async (client) => {
                    const func = (sql, args) => client.query(sql, args).then((v) => v.rows);
                    await this.#recordError(func, wfid, funcId, txn_snapshot, e, user_database_1.pgNodeIsKeyConflictError, proc.name);
                }, { isolationLevel: transaction_1.IsolationLevel.ReadCommitted });
                await userDB.transaction(async (client) => {
                    const func = (sql, args) => userDB.queryWithClient(client, sql, ...args);
                    await this.#recordError(func, wfid, funcId, txn_snapshot, e, (error) => userDB.isKeyConflictError(error), proc.name);
                }, { isolationLevel: transaction_1.IsolationLevel.ReadCommitted });
            }
            throw err;
        }
    }
}
|
1077
|
+
async #callProcedureFunctionRemote(proc, args, span, config, funcId) {
    // "Remote" stored-procedure execution: invoke the compiled database stored
    // procedure, which runs the function body and records its output inside
    // the database itself, then unpack its { error, output, txn_id } result.
    if (this.#debugMode) {
        throw new error_1.DBOSDebuggerError("Can't invoke stored procedure in debug mode.");
    }
    const pctx = (0, context_1.getCurrentContextStore)();
    const wfid = pctx.workflowId;
    await this.systemDatabase.checkIfCanceled(wfid);
    // Serialized call context forwarded to the stored procedure.
    const $jsonCtx = {
        request: pctx.request,
        authenticatedUser: pctx.authenticatedUser,
        authenticatedRoles: pctx.authenticatedRoles,
        assumedRole: pctx.assumedRole,
    };
    // TODO (Qian/Harry): remove this unshift when we remove the resultBuffer argument
    // Note, node-pg converts JS arrays to postgres array literals, so must call JSON.stringify on
    // args and bufferedResults before being passed to #invokeStoredProc
    const $args = [wfid, funcId, pctx.presetID, $jsonCtx, null, JSON.stringify(args)];
    const readonly = config.readOnly ?? false;
    if (!readonly) {
        // NOTE(review): the extra leading null appears to be the buffered-results
        // placeholder for read-write procedures — confirm against the generated
        // stored-procedure signature.
        $args.unshift(null);
    }
    const [{ return_value }] = await this.#invokeStoredProc(proc, $args);
    const { error, output, txn_id } = return_value;
    // if the stored proc returns an error, deserialize and throw it.
    // stored proc saves the error in tx_output before returning
    if (error) {
        throw (0, serialize_error_1.deserializeError)(error);
    }
    if (txn_id) {
        span.setAttribute('pg_txn_id', txn_id);
    }
    span.setStatus({ code: api_1.SpanStatusCode.OK });
    return output;
}
|
1111
|
+
async #invokeStoredProc(proc, args) {
|
1112
|
+
if (!this.#procedurePool) {
|
1113
|
+
throw new Error('User Database not enabled.');
|
1114
|
+
}
|
1115
|
+
const client = await this.#procedurePool.connect();
|
1116
|
+
const log = (msg) => this.#logNotice(msg);
|
1117
|
+
const procname = (0, decorators_1.getRegisteredFunctionFullName)(proc);
|
1118
|
+
const plainProcName = `${procname.className}_${procname.name}_p`;
|
1119
|
+
const procName = utils_1.globalParams.wasComputed ? plainProcName : `v${utils_1.globalParams.appVersion}_${plainProcName}`;
|
1120
|
+
const sql = `CALL "${procName}"(${args.map((_v, i) => `$${i + 1}`).join()});`;
|
1121
|
+
try {
|
1122
|
+
client.on('notice', log);
|
1123
|
+
return await client.query(sql, args).then((value) => value.rows);
|
1124
|
+
}
|
1125
|
+
finally {
|
1126
|
+
client.off('notice', log);
|
1127
|
+
client.release();
|
1128
|
+
}
|
1129
|
+
}
|
1130
|
+
async invokeStoredProcFunction(func, config) {
|
1131
|
+
if (!this.#procedurePool) {
|
1132
|
+
throw new Error('User Database not enabled.');
|
1133
|
+
}
|
1134
|
+
const client = await this.#procedurePool.connect();
|
1135
|
+
try {
|
1136
|
+
const readOnly = config.readOnly ?? false;
|
1137
|
+
const isolationLevel = config.isolationLevel ?? transaction_1.IsolationLevel.Serializable;
|
1138
|
+
await client.query(`BEGIN ISOLATION LEVEL ${isolationLevel}`);
|
1139
|
+
if (readOnly) {
|
1140
|
+
await client.query(`SET TRANSACTION READ ONLY`);
|
1141
|
+
}
|
1142
|
+
const result = await func(client);
|
1143
|
+
await client.query(`COMMIT`);
|
1144
|
+
return result;
|
1145
|
+
}
|
1146
|
+
catch (err) {
|
1147
|
+
await client.query(`ROLLBACK`);
|
1148
|
+
throw err;
|
1149
|
+
}
|
1150
|
+
finally {
|
1151
|
+
client.release();
|
1152
|
+
}
|
1153
|
+
}
|
429
1154
|
async runStepTempWF(stepFn, params, ...args) {
|
430
1155
|
return await (await this.startStepTempWF(stepFn, params, undefined, undefined, ...args)).getResult();
|
431
1156
|
}
|
@@ -553,12 +1278,13 @@ class DBOSExecutor {
|
|
553
1278
|
}
|
554
1279
|
else {
|
555
1280
|
// Record the execution and return.
|
1281
|
+
const funcResult = (0, utils_1.serializeFunctionInputOutput)(result, [stepFnName, '<result>']);
|
556
1282
|
await this.systemDatabase.recordOperationResult(wfid, funcID, stepFnName, true, {
|
557
|
-
output:
|
1283
|
+
output: funcResult.stringified,
|
558
1284
|
});
|
559
1285
|
span.setStatus({ code: api_1.SpanStatusCode.OK });
|
560
1286
|
this.tracer.endSpan(span);
|
561
|
-
return
|
1287
|
+
return funcResult.deserialized;
|
562
1288
|
}
|
563
1289
|
}
|
564
1290
|
async runSendTempWF(destinationId, message, topic, idempotencyKey) {
|
@@ -587,7 +1313,7 @@ class DBOSExecutor {
|
|
587
1313
|
*/
|
588
1314
|
forkWorkflow(workflowID, startStep, options = {}) {
|
589
1315
|
const newWorkflowID = options.newWorkflowID ?? (0, context_1.getNextWFID)(undefined);
|
590
|
-
return (0, workflow_management_1.forkWorkflow)(this.systemDatabase, workflowID, startStep, { ...options, newWorkflowID });
|
1316
|
+
return (0, workflow_management_1.forkWorkflow)(this.systemDatabase, this.#userDatabase, workflowID, startStep, { ...options, newWorkflowID });
|
591
1317
|
}
|
592
1318
|
/**
|
593
1319
|
* Retrieve a handle for a workflow UUID.
|
@@ -605,11 +1331,12 @@ class DBOSExecutor {
|
|
605
1331
|
}
|
606
1332
|
try {
|
607
1333
|
const output = await callback();
|
1334
|
+
const funcOutput = (0, utils_1.serializeFunctionInputOutput)(output, [functionName, '<result>']);
|
608
1335
|
await this.systemDatabase.recordOperationResult(workflowID, functionID, functionName, true, {
|
609
|
-
output:
|
1336
|
+
output: funcOutput.stringified,
|
610
1337
|
childWorkflowID: childWfId,
|
611
1338
|
});
|
612
|
-
return
|
1339
|
+
return funcOutput.deserialized;
|
613
1340
|
}
|
614
1341
|
catch (e) {
|
615
1342
|
await this.systemDatabase.recordOperationResult(workflowID, functionID, functionName, false, {
|
@@ -631,7 +1358,18 @@ class DBOSExecutor {
|
|
631
1358
|
return (0, workflow_management_1.listQueuedWorkflows)(this.systemDatabase, input);
|
632
1359
|
}
|
633
1360
|
async listWorkflowSteps(workflowID) {
|
634
|
-
return (0, workflow_management_1.listWorkflowSteps)(this.systemDatabase, workflowID);
|
1361
|
+
return (0, workflow_management_1.listWorkflowSteps)(this.systemDatabase, this.#userDatabase, workflowID);
|
1362
|
+
}
|
1363
|
+
async queryUserDB(sql, params) {
|
1364
|
+
if (!this.#userDatabase) {
|
1365
|
+
throw new Error('User database not enabled.');
|
1366
|
+
}
|
1367
|
+
if (params !== undefined) {
|
1368
|
+
return await this.#userDatabase.query(sql, ...params);
|
1369
|
+
}
|
1370
|
+
else {
|
1371
|
+
return await this.#userDatabase.query(sql);
|
1372
|
+
}
|
635
1373
|
}
|
636
1374
|
/* INTERNAL HELPERS */
|
637
1375
|
/**
|
@@ -737,7 +1475,22 @@ class DBOSExecutor {
|
|
737
1475
|
if (!nameArr[0].startsWith(DBOSExecutor.#tempWorkflowName)) {
|
738
1476
|
throw new error_1.DBOSError(`Cannot find workflow function for a non-temporary workflow, ID ${workflowID}, class '${wfStatus.workflowClassName}', function '${wfName}'; did you change your code?`);
|
739
1477
|
}
|
740
|
-
if (nameArr[1] === exports.TempWorkflowType.
|
1478
|
+
if (nameArr[1] === exports.TempWorkflowType.transaction) {
|
1479
|
+
const txnReg = (0, decorators_1.getFunctionRegistrationByName)(wfStatus.workflowClassName, nameArr[2]);
|
1480
|
+
if (!txnReg?.txnConfig) {
|
1481
|
+
this.logger.error(`Cannot find transaction info for ID ${workflowID}, name ${nameArr[2]}`);
|
1482
|
+
throw new error_1.DBOSNotRegisteredError(nameArr[2]);
|
1483
|
+
}
|
1484
|
+
return await (0, context_1.runWithTopContext)(recoverCtx, async () => {
|
1485
|
+
return await this.startTransactionTempWF(txnReg.registeredFunction, {
|
1486
|
+
workflowUUID: workflowStartID,
|
1487
|
+
configuredInstance: configuredInst,
|
1488
|
+
queueName: wfStatus.queueName,
|
1489
|
+
executeWorkflow: true,
|
1490
|
+
}, undefined, undefined, ...inputs);
|
1491
|
+
});
|
1492
|
+
}
|
1493
|
+
else if (nameArr[1] === exports.TempWorkflowType.step) {
|
741
1494
|
const stepReg = (0, decorators_1.getFunctionRegistrationByName)(wfStatus.workflowClassName, nameArr[2]);
|
742
1495
|
if (!stepReg?.stepConfig) {
|
743
1496
|
this.logger.error(`Cannot find step info for ID ${workflowID}, name ${nameArr[2]}`);
|
@@ -807,6 +1560,19 @@ class DBOSExecutor {
|
|
807
1560
|
async resumeWorkflow(workflowID) {
|
808
1561
|
await this.systemDatabase.resumeWorkflow(workflowID);
|
809
1562
|
}
|
1563
|
+
logRegisteredHTTPUrls() {
    // Log every registered HTTP endpoint (method + URL), and for each one
    // the roles required to invoke it, if any were declared.
    this.logger.info('HTTP endpoints supported:');
    (0, decorators_1.getAllRegisteredFunctions)().forEach((registeredOperation) => {
        const ro = registeredOperation;
        if (ro.apiURL) {
            this.logger.info(' ' + ro.apiType.padEnd(6) + ' : ' + ro.apiURL);
            const roles = ro.getRequiredRoles();
            if (roles.length > 0) {
                this.logger.info(' Required Roles: ' + utils_1.DBOSJSON.stringify(roles));
            }
        }
    });
}
|
810
1576
|
/**
|
811
1577
|
An application's version is computed from a hash of the source of its workflows.
|
812
1578
|
This is guaranteed to be stable given identical source code because it uses an MD5 hash
|