@dbos-inc/dbos-sdk 2.8.13-preview → 2.8.18-preview
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dbos-config.schema.json +0 -4
- package/dist/dbos-config.schema.json +0 -4
- package/dist/schemas/system_db_schema.d.ts +1 -0
- package/dist/schemas/system_db_schema.d.ts.map +1 -1
- package/dist/schemas/user_db_schema.d.ts +1 -0
- package/dist/schemas/user_db_schema.d.ts.map +1 -1
- package/dist/schemas/user_db_schema.js.map +1 -1
- package/dist/src/client.d.ts.map +1 -1
- package/dist/src/client.js +14 -17
- package/dist/src/client.js.map +1 -1
- package/dist/src/dbos-executor.d.ts +7 -4
- package/dist/src/dbos-executor.d.ts.map +1 -1
- package/dist/src/dbos-executor.js +132 -91
- package/dist/src/dbos-executor.js.map +1 -1
- package/dist/src/dbos-runtime/cli.d.ts.map +1 -1
- package/dist/src/dbos-runtime/cli.js +10 -0
- package/dist/src/dbos-runtime/cli.js.map +1 -1
- package/dist/src/dbos-runtime/config.d.ts +24 -2
- package/dist/src/dbos-runtime/config.d.ts.map +1 -1
- package/dist/src/dbos-runtime/config.js +151 -71
- package/dist/src/dbos-runtime/config.js.map +1 -1
- package/dist/src/dbos-runtime/docker_pg_helper.d.ts +21 -0
- package/dist/src/dbos-runtime/docker_pg_helper.d.ts.map +1 -0
- package/dist/src/dbos-runtime/docker_pg_helper.js +137 -0
- package/dist/src/dbos-runtime/docker_pg_helper.js.map +1 -0
- package/dist/src/dbos-runtime/migrate.d.ts.map +1 -1
- package/dist/src/dbos-runtime/migrate.js +8 -6
- package/dist/src/dbos-runtime/migrate.js.map +1 -1
- package/dist/src/dbos-runtime/runtime.d.ts.map +1 -1
- package/dist/src/dbos-runtime/runtime.js +0 -2
- package/dist/src/dbos-runtime/runtime.js.map +1 -1
- package/dist/src/dbos-runtime/workflow_management.d.ts +5 -4
- package/dist/src/dbos-runtime/workflow_management.d.ts.map +1 -1
- package/dist/src/dbos-runtime/workflow_management.js +14 -16
- package/dist/src/dbos-runtime/workflow_management.js.map +1 -1
- package/dist/src/dbos.d.ts +1 -0
- package/dist/src/dbos.d.ts.map +1 -1
- package/dist/src/dbos.js +41 -22
- package/dist/src/dbos.js.map +1 -1
- package/dist/src/error.d.ts +7 -0
- package/dist/src/error.d.ts.map +1 -1
- package/dist/src/error.js +15 -1
- package/dist/src/error.js.map +1 -1
- package/dist/src/eventreceiver.d.ts +1 -1
- package/dist/src/eventreceiver.d.ts.map +1 -1
- package/dist/src/httpServer/server.d.ts.map +1 -1
- package/dist/src/httpServer/server.js +8 -19
- package/dist/src/httpServer/server.js.map +1 -1
- package/dist/src/system_database.d.ts +84 -53
- package/dist/src/system_database.d.ts.map +1 -1
- package/dist/src/system_database.js +195 -296
- package/dist/src/system_database.js.map +1 -1
- package/dist/src/user_database.d.ts.map +1 -1
- package/dist/src/user_database.js +8 -2
- package/dist/src/user_database.js.map +1 -1
- package/dist/src/workflow.d.ts +3 -2
- package/dist/src/workflow.d.ts.map +1 -1
- package/dist/src/workflow.js +36 -26
- package/dist/src/workflow.js.map +1 -1
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +1 -1
- package/dist/src/dbos-runtime/db_connection.d.ts +0 -10
- package/dist/src/dbos-runtime/db_connection.d.ts.map +0 -1
- package/dist/src/dbos-runtime/db_connection.js +0 -59
- package/dist/src/dbos-runtime/db_connection.js.map +0 -1
- package/dist/src/dbos-runtime/db_wizard.d.ts +0 -3
- package/dist/src/dbos-runtime/db_wizard.d.ts.map +0 -1
- package/dist/src/dbos-runtime/db_wizard.js +0 -170
- package/dist/src/dbos-runtime/db_wizard.js.map +0 -1
package/dist/src/dbos-executor.js

@@ -190,24 +190,12 @@ class DBOSExecutor {
     // TODO: make Prisma work with debugger proxy.
     // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-require-imports
     const { PrismaClient } = require(node_path_1.default.join(process.cwd(), 'node_modules', '@prisma', 'client')); // Find the prisma client in the node_modules of the current project
-    let dbUrl = `postgresql://${userDBConfig.user}:${userDBConfig.password}@${userDBConfig.host}:${userDBConfig.port}/${userDBConfig.database}`;
-    const queryParams = {};
-    if (userDBConfig.connectionTimeoutMillis) {
-        queryParams['connect_timeout'] = userDBConfig.connectionTimeoutMillis;
-    }
-    if (userDBConfig.max) {
-        queryParams['connection_limit'] = String(userDBConfig.max);
-    }
-    const queryString = new URLSearchParams(queryParams).toString();
-    if (queryString) {
-        dbUrl += `?${queryString}`;
-    }
     this.userDatabase = new user_database_1.PrismaUserDatabase(
     // eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-unsafe-call
     new PrismaClient({
         datasources: {
             db: {
-                url:
+                url: userDBConfig.connectionString,
             },
         },
     }));
@@ -220,16 +208,11 @@ class DBOSExecutor {
     this.userDatabase = new user_database_1.TypeORMDatabase(
     // eslint-disable-next-line @typescript-eslint/no-unsafe-argument, @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-member-access
     new DataSourceExports.DataSource({
-        type: 'postgres',
-
-
-        username: userDBConfig.user,
-        password: userDBConfig.password,
-        database: userDBConfig.database,
+        type: 'postgres',
+        url: userDBConfig.connectionString,
+        connectTimeoutMS: userDBConfig.connectionTimeoutMillis,
         entities: this.typeormEntities,
-        ssl: userDBConfig.ssl,
         poolSize: userDBConfig.max,
-        connectTimeoutMS: userDBConfig.connectionTimeoutMillis,
     }));
 }
 catch (s) {
@@ -242,13 +225,8 @@ class DBOSExecutor {
     const knexConfig = {
         client: 'postgres',
         connection: {
-
-
-            user: userDBConfig.user,
-            password: userDBConfig.password,
-            database: userDBConfig.database,
-            ssl: userDBConfig.ssl,
-            connectTimeout: userDBConfig.connectionTimeoutMillis,
+            connectionString: userDBConfig.connectionString,
+            connectionTimeoutMillis: userDBConfig.connectionTimeoutMillis,
         },
         pool: {
             min: 0,
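
Across all three user-database clients (Prisma, TypeORM, and Knex), the executor now hands the driver a single `userDBConfig.connectionString` instead of assembling the URL from separate user/password/host/port/database fields as the removed Prisma branch did. For reference, a minimal sketch of how such a string could be assembled, mirroring the removed code; the `PgConfig` shape and helper name are illustrative only, not part of the SDK (the actual assembly presumably now lives in `dbos-runtime/config.js`):

```ts
// Illustrative only: rebuilds the URL the removed Prisma branch constructed inline.
// Field names mirror the old userDBConfig fields; this is not an SDK API.
interface PgConfig {
  user: string;
  password: string;
  host: string;
  port: number;
  database: string;
  connectionTimeoutMillis?: number;
  max?: number; // pool size
}

function buildConnectionString(cfg: PgConfig): string {
  const base = `postgresql://${cfg.user}:${cfg.password}@${cfg.host}:${cfg.port}/${cfg.database}`;
  const queryParams: Record<string, string> = {};
  if (cfg.connectionTimeoutMillis) queryParams['connect_timeout'] = String(cfg.connectionTimeoutMillis);
  if (cfg.max) queryParams['connection_limit'] = String(cfg.max);
  const qs = new URLSearchParams(queryParams).toString();
  return qs ? `${base}?${qs}` : base;
}
```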
@@ -542,30 +520,38 @@ class DBOSExecutor {
     return this.procedureInfoMap.get(pfName);
 }
 // TODO: getProcedureInfoByNames??
+static reviveResultOrError(r, success) {
+    if (success === true || !r.err) {
+        return utils_1.DBOSJSON.parse(r.res ?? null);
+    }
+    else {
+        throw (0, serialize_error_1.deserializeError)(utils_1.DBOSJSON.parse(r.err));
+    }
+}
 async workflow(wf, params, ...args) {
     return this.internalWorkflow(wf, params, undefined, undefined, ...args);
 }
 // If callerUUID and functionID are set, it means the workflow is invoked from within a workflow.
-async internalWorkflow(wf, params,
-    const
-    const
+async internalWorkflow(wf, params, callerID, callerFunctionID, ...args) {
+    const workflowID = params.workflowUUID ? params.workflowUUID : this.#generateUUID();
+    const presetID = params.workflowUUID ? true : false;
     const wInfo = this.getWorkflowInfo(wf);
     if (wInfo === undefined) {
         throw new error_1.DBOSNotRegisteredError(wf.name);
     }
     const wConfig = wInfo.config;
     const passContext = wInfo.registration?.passContext ?? true;
-    const wCtxt = new workflow_1.WorkflowContextImpl(this, params.parentCtx,
+    const wCtxt = new workflow_1.WorkflowContextImpl(this, params.parentCtx, workflowID, wConfig, wf.name, presetID, params.tempWfType, params.tempWfName);
     const internalStatus = {
-        workflowUUID:
+        workflowUUID: workflowID,
         status: params.queueName !== undefined ? workflow_1.StatusString.ENQUEUED : workflow_1.StatusString.PENDING,
         workflowName: wf.name,
         workflowClassName: wCtxt.isTempWorkflow ? '' : (0, decorators_1.getRegisteredMethodClassName)(wf),
         workflowConfigName: params.configuredInstance?.name || '',
         queueName: params.queueName,
         authenticatedUser: wCtxt.authenticatedUser,
-        output:
-        error:
+        output: null,
+        error: null,
         assumedRole: wCtxt.assumedRole,
         authenticatedRoles: wCtxt.authenticatedRoles,
         request: wCtxt.request,
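
The new static `reviveResultOrError` helper is how a recorded operation result is turned back into a return value or a re-thrown error during replay. A minimal standalone sketch of the same logic, assuming a row shape of `{ res?: string | null; err?: string | null }` holding JSON-serialized payloads (the real types live in `system_database.d.ts`):

```ts
import { deserializeError } from 'serialize-error';

interface RecordedResult {
  res?: string | null; // serialized output, if the operation succeeded
  err?: string | null; // serialized error, if it failed
}

// Mirrors DBOSExecutor.reviveResultOrError: return the parsed output, or
// deserialize and throw the recorded error.
function reviveResultOrError<T>(r: RecordedResult, success?: boolean): T {
  if (success === true || !r.err) {
    return JSON.parse(r.res ?? 'null') as T; // the SDK uses DBOSJSON.parse here
  }
  throw deserializeError(JSON.parse(r.err));
}
```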
@@ -583,28 +569,31 @@ class DBOSExecutor {
     // Synchronously set the workflow's status to PENDING and record workflow inputs.
     // We have to do it for all types of workflows because operation_outputs table has a foreign key constraint on workflow status table.
     if (this.isDebugging) {
-        const wfStatus = await this.systemDatabase.getWorkflowStatus(
-        const wfInputs = await this.systemDatabase.getWorkflowInputs(
+        const wfStatus = await this.systemDatabase.getWorkflowStatus(workflowID);
+        const wfInputs = await this.systemDatabase.getWorkflowInputs(workflowID);
         if (!wfStatus || !wfInputs) {
-            throw new error_1.DBOSDebuggerError(`Failed to find inputs for workflow UUID ${
+            throw new error_1.DBOSDebuggerError(`Failed to find inputs for workflow UUID ${workflowID}`);
         }
         // Make sure we use the same input.
         if (utils_1.DBOSJSON.stringify(args) !== utils_1.DBOSJSON.stringify(wfInputs)) {
-            throw new error_1.DBOSDebuggerError(`Detected different inputs for workflow UUID ${
+            throw new error_1.DBOSDebuggerError(`Detected different inputs for workflow UUID ${workflowID}.\n Received: ${utils_1.DBOSJSON.stringify(args)}\n Original: ${utils_1.DBOSJSON.stringify(wfInputs)}`);
         }
         status = wfStatus.status;
     }
     else {
         // TODO: Make this transactional (and with the queue step below)
-        if (callerFunctionID !== undefined &&
-            const
-            if (
-                return new workflow_1.RetrievedHandle(this.systemDatabase,
+        if (callerFunctionID !== undefined && callerID !== undefined) {
+            const cr = await this.systemDatabase.getOperationResult(callerID, callerFunctionID);
+            if (cr.res !== undefined) {
+                return new workflow_1.RetrievedHandle(this.systemDatabase, cr.res.child, callerID, callerFunctionID);
             }
         }
         const ires = await this.systemDatabase.initWorkflowStatus(internalStatus, args);
-        if (callerFunctionID !== undefined &&
-            await this.systemDatabase.
+        if (callerFunctionID !== undefined && callerID !== undefined) {
+            await this.systemDatabase.recordOperationResult(callerID, callerFunctionID, {
+                childWfId: workflowID,
+                functionName: internalStatus.workflowName,
+            }, true);
         }
         args = ires.args;
         status = ires.status;
@@ -621,9 +610,9 @@ class DBOSExecutor {
         return await callPromise;
     });
     if (this.isDebugging) {
-        const recordedResult = await this.systemDatabase.
+        const recordedResult = DBOSExecutor.reviveResultOrError((await this.systemDatabase.awaitWorkflowResult(workflowID)));
         if (!resultsMatch(recordedResult, callResult)) {
-            this.logger.error(`Detect different output for the workflow UUID ${
+            this.logger.error(`Detect different output for the workflow UUID ${workflowID}!\n Received: ${utils_1.DBOSJSON.stringify(callResult)}\n Original: ${utils_1.DBOSJSON.stringify(recordedResult)}`);
         }
         result = recordedResult;
     }
@@ -636,23 +625,20 @@ class DBOSExecutor {
         }
         return utils_1.DBOSJSON.stringify(recordedResult) === utils_1.DBOSJSON.stringify(callResult);
     }
-    internalStatus.output = result;
+    internalStatus.output = utils_1.DBOSJSON.stringify(result);
     internalStatus.status = workflow_1.StatusString.SUCCESS;
     if (internalStatus.queueName && !this.isDebugging) {
-
-        // But waiting this long is for concurrency control anyway,
-        // so it is probably done enough.
-        await this.systemDatabase.dequeueWorkflow(workflowUUID, this.#getQueueByName(internalStatus.queueName));
+        await this.systemDatabase.dequeueWorkflow(workflowID, this.#getQueueByName(internalStatus.queueName));
     }
     if (!this.isDebugging) {
-        await this.systemDatabase.recordWorkflowOutput(
+        await this.systemDatabase.recordWorkflowOutput(workflowID, internalStatus);
     }
     wCtxt.span.setStatus({ code: api_1.SpanStatusCode.OK });
 }
 catch (err) {
     if (err instanceof error_1.DBOSWorkflowConflictUUIDError) {
         // Retrieve the handle and wait for the result.
-        const retrievedHandle = this.retrieveWorkflow(
+        const retrievedHandle = this.retrieveWorkflow(workflowID);
         result = await retrievedHandle.getResult();
         wCtxt.span.setAttribute('cached', true);
         wCtxt.span.setStatus({ code: api_1.SpanStatusCode.OK });
@@ -661,10 +647,10 @@ class DBOSExecutor {
         internalStatus.error = err.message;
         internalStatus.status = workflow_1.StatusString.CANCELLED;
         if (!this.isDebugging) {
-            await this.systemDatabase.setWorkflowStatus(
+            await this.systemDatabase.setWorkflowStatus(workflowID, workflow_1.StatusString.CANCELLED, false);
         }
         wCtxt.span.setStatus({ code: api_1.SpanStatusCode.ERROR, message: err.message });
-        this.logger.info(`Cancelled workflow ${
+        this.logger.info(`Cancelled workflow ${workflowID}`);
         throw err;
     }
     else {
@@ -678,10 +664,10 @@ class DBOSExecutor {
         internalStatus.error = utils_1.DBOSJSON.stringify((0, serialize_error_1.serializeError)(e));
         internalStatus.status = workflow_1.StatusString.ERROR;
         if (internalStatus.queueName && !this.isDebugging) {
-            await this.systemDatabase.dequeueWorkflow(
+            await this.systemDatabase.dequeueWorkflow(workflowID, this.#getQueueByName(internalStatus.queueName));
         }
         if (!this.isDebugging) {
-            await this.systemDatabase.recordWorkflowError(
+            await this.systemDatabase.recordWorkflowError(workflowID, internalStatus);
         }
         // TODO: Log errors, but not in the tests when they're expected.
         wCtxt.span.setStatus({ code: api_1.SpanStatusCode.ERROR, message: e.message });
@@ -703,17 +689,17 @@ class DBOSExecutor {
         })
             .finally(() => {
             // Remove itself from pending workflow map.
-            this.pendingWorkflowMap.delete(
+            this.pendingWorkflowMap.delete(workflowID);
         });
-        this.pendingWorkflowMap.set(
+        this.pendingWorkflowMap.set(workflowID, awaitWorkflowPromise);
         // Return the normal handle that doesn't capture errors.
-        return new workflow_1.InvokedHandle(this.systemDatabase, workflowPromise,
+        return new workflow_1.InvokedHandle(this.systemDatabase, workflowPromise, workflowID, wf.name, callerID, callerFunctionID);
     }
     else {
         if (params.queueName && status === 'ENQUEUED' && !this.isDebugging) {
-            await this.systemDatabase.enqueueWorkflow(
+            await this.systemDatabase.enqueueWorkflow(workflowID, this.#getQueueByName(params.queueName).name);
         }
-        return new workflow_1.RetrievedHandle(this.systemDatabase,
+        return new workflow_1.RetrievedHandle(this.systemDatabase, workflowID, callerID, callerFunctionID);
     }
 }
 #getQueueByName(name) {
@@ -736,14 +722,23 @@ class DBOSExecutor {
  * Otherwise, return DBOSNull.
  * Also return the transaction snapshot and id information of the original or current transaction.
  */
-async #checkExecution(query, workflowUUID, funcID) {
-    const rows = await query(
+async #checkExecution(query, workflowUUID, funcID, funcName) {
+    const rows = await query(`(SELECT output, error, txn_snapshot, txn_id, function_name, true as recorded
+          FROM dbos.transaction_outputs
+          WHERE workflow_uuid=$1 AND function_id=$2
+        UNION ALL
+          SELECT null as output, null as error, pg_current_snapshot()::text as txn_snapshot,
+                 null as txn_id, '' as function_name, false as recorded
+        ) ORDER BY recorded`, [workflowUUID, funcID]);
     if (rows.length === 0 || rows.length > 2) {
         const returnedRows = JSON.stringify(rows);
         this.logger.error('Unexpected! This should never happen. Returned rows: ' + returnedRows);
         throw new error_1.DBOSError('This should never happen. Returned rows: ' + returnedRows);
    }
    if (rows.length === 2) {
+        if (rows[1].function_name !== funcName) {
+            throw new error_1.DBOSUnexpectedStepError(workflowUUID, funcID, funcName, rows[0].function_name);
+        }
        const { txn_snapshot, txn_id } = rows[1];
        const error = utils_1.DBOSJSON.parse(rows[1].error);
        if (error) {
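
The reworked `#checkExecution` query always returns the current snapshot row (`recorded = false`) and, when the step already ran, a second row carrying the recorded output (`recorded = true`); because of `ORDER BY recorded`, the recorded row sorts last. The new `funcName` argument lets replay detect that a different function now occupies this step position, which raises `DBOSUnexpectedStepError` instead of silently reusing the wrong result. A sketch of how the two result shapes can be interpreted; the row type and helper name are assumptions for illustration:

```ts
interface CheckRow {
  output: string | null;
  error: string | null;
  txn_snapshot: string;
  txn_id: string | null;
  function_name: string;
  recorded: boolean;
}

function interpretCheckRows(rows: CheckRow[], funcName: string) {
  if (rows.length === 1) {
    // Nothing recorded yet; the single row only carries the current snapshot.
    return { recorded: false as const, txn_snapshot: rows[0].txn_snapshot };
  }
  // Two rows: ORDER BY recorded puts the previously recorded output last.
  const prev = rows[1];
  if (prev.function_name !== funcName) {
    // Same position, different function: the workflow code changed since the
    // original execution (the condition DBOSUnexpectedStepError reports).
    throw new Error(`step ${funcName} does not match recorded step ${prev.function_name}`);
  }
  return { recorded: true as const, txn_snapshot: prev.txn_snapshot, output: prev.output, error: prev.error };
}
```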
@@ -858,7 +853,7 @@ class DBOSExecutor {
     let prevResult = exports.dbosNull;
     const queryFunc = (sql, args) => this.userDatabase.queryWithClient(client, sql, ...args);
     if (wfCtx.presetUUID) {
-        const executionResult = await this.#checkExecution(queryFunc, workflowUUID, funcId);
+        const executionResult = await this.#checkExecution(queryFunc, workflowUUID, funcId, txn.name);
         prevResult = executionResult.result;
         txn_snapshot = executionResult.txn_snapshot;
         if (prevResult !== exports.dbosNull) {
@@ -941,7 +936,7 @@ class DBOSExecutor {
     }
     catch (err) {
         const e = err;
-        if (!this.debugMode) {
+        if (!this.debugMode && !(e instanceof error_1.DBOSUnexpectedStepError)) {
             if (this.userDatabase.isRetriableTransactionError(err)) {
                 // serialization_failure in PostgreSQL
                 span.addEvent('TXN SERIALIZATION FAILURE', { retryWaitMillis: retryWaitMillis }, performance.now());
@@ -1026,7 +1021,7 @@ class DBOSExecutor {
     let prevResult = exports.dbosNull;
     const queryFunc = (sql, args) => this.procedurePool.query(sql, args).then((v) => v.rows);
     if (wfCtx.presetUUID) {
-        const executionResult = await this.#checkExecution(queryFunc, wfCtx.workflowUUID, funcId);
+        const executionResult = await this.#checkExecution(queryFunc, wfCtx.workflowUUID, funcId, wfCtx.operationName);
         prevResult = executionResult.result;
         txn_snapshot = executionResult.txn_snapshot;
         if (prevResult !== exports.dbosNull) {
@@ -1239,8 +1234,12 @@ class DBOSExecutor {
     }, wfCtx.span);
     const ctxt = new step_1.StepContextImpl(wfCtx, funcID, span, this.logger, commInfo.config, stepFn.name);
     // Check if this execution previously happened, returning its original result if it did.
-    const
-    if (
+    const checkr = await this.systemDatabase.getOperationResult(wfCtx.workflowUUID, ctxt.functionID);
+    if (checkr.res !== undefined) {
+        if (checkr.res.functionName !== ctxt.operationName) {
+            throw new error_1.DBOSUnexpectedStepError(ctxt.workflowUUID, ctxt.functionID, ctxt.operationName, checkr.res.functionName ?? '?');
+        }
+        const check = DBOSExecutor.reviveResultOrError(checkr.res);
         ctxt.span.setAttribute('cached', true);
         ctxt.span.setStatus({ code: api_1.SpanStatusCode.OK });
         this.tracer.endSpan(ctxt.span);
@@ -1318,14 +1317,20 @@ class DBOSExecutor {
     if (result === exports.dbosNull) {
         // Record the error, then throw it.
         err = err === exports.dbosNull ? new error_1.DBOSMaxStepRetriesError(stepFn.name, ctxt.maxAttempts, errors) : err;
-        await this.systemDatabase.
+        await this.systemDatabase.recordOperationResult(wfCtx.workflowUUID, ctxt.functionID, {
+            serialError: utils_1.DBOSJSON.stringify((0, serialize_error_1.serializeError)(err)),
+            functionName: ctxt.operationName,
+        }, true);
         ctxt.span.setStatus({ code: api_1.SpanStatusCode.ERROR, message: err.message });
         this.tracer.endSpan(ctxt.span);
         throw err;
     }
     else {
         // Record the execution and return.
-        await this.systemDatabase.
+        await this.systemDatabase.recordOperationResult(wfCtx.workflowUUID, ctxt.functionID, {
+            serialOutput: utils_1.DBOSJSON.stringify(result),
+            functionName: ctxt.operationName,
+        }, true);
         ctxt.span.setStatus({ code: api_1.SpanStatusCode.OK });
         this.tracer.endSpan(ctxt.span);
         return result;
@@ -1347,7 +1352,7 @@ class DBOSExecutor {
  * Wait for a workflow to emit an event, then return its value.
  */
 async getEvent(workflowUUID, key, timeoutSeconds = DBOSExecutor.defaultNotificationTimeoutSec) {
-    return this.systemDatabase.getEvent(workflowUUID, key, timeoutSeconds);
+    return utils_1.DBOSJSON.parse(await this.systemDatabase.getEvent(workflowUUID, key, timeoutSeconds));
 }
 /**
  * Retrieve a handle for a workflow UUID.
@@ -1355,8 +1360,32 @@ class DBOSExecutor {
 retrieveWorkflow(workflowID) {
     return new workflow_1.RetrievedHandle(this.systemDatabase, workflowID);
 }
-
-
+async runAsStep(callback, functionName, workflowID, functionID) {
+    if (workflowID !== undefined && functionID !== undefined) {
+        const res = await this.systemDatabase.getOperationResult(workflowID, functionID);
+        if (res.res !== undefined) {
+            if (res.res.functionName !== functionName) {
+                throw new error_1.DBOSUnexpectedStepError(workflowID, functionID, functionName, res.res.functionName);
+            }
+            return DBOSExecutor.reviveResultOrError(res.res);
+        }
+    }
+    try {
+        const output = await callback();
+        if (workflowID !== undefined && functionID !== undefined) {
+            await this.systemDatabase.recordOperationResult(workflowID, functionID, { serialOutput: utils_1.DBOSJSON.stringify(output), functionName }, true);
+        }
+        return output;
+    }
+    catch (e) {
+        if (workflowID !== undefined && functionID !== undefined) {
+            await this.systemDatabase.recordOperationResult(workflowID, functionID, { serialError: utils_1.DBOSJSON.stringify((0, serialize_error_1.serializeError)(e)), functionName }, false);
+        }
+        throw e;
+    }
+}
+getWorkflowStatus(workflowID, callerID, callerFN) {
+    return this.systemDatabase.getWorkflowStatus(workflowID, callerID, callerFN);
 }
 getWorkflows(input) {
     return this.systemDatabase.getWorkflows(input);
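
`runAsStep` wraps an arbitrary async callback with checkpoint-and-replay semantics: if a result (or error) is already recorded for this workflow and function position it is returned (or re-thrown) without running the callback, and the recorded function name is checked so a renamed or reordered step fails loudly. A hypothetical usage sketch; the structural type, `chargeCustomer` name, and payment call are placeholders, not SDK APIs:

```ts
// Structural view of the method shown above, so the sketch stands alone.
type StepRunner = {
  runAsStep<T>(
    callback: () => Promise<T>,
    functionName: string,
    workflowID?: string,
    functionID?: number,
  ): Promise<T>;
};

async function callPaymentProvider(): Promise<{ chargeId: string }> {
  return { chargeId: 'ch_123' }; // stand-in for a real side-effecting call
}

async function chargeOnce(executor: StepRunner, workflowID: string, functionID: number) {
  // The first execution runs the callback and records its serialized output under
  // (workflowID, functionID, 'chargeCustomer'); a later replay with the same IDs
  // returns the recorded value, and a different name at that position throws
  // DBOSUnexpectedStepError.
  return executor.runAsStep(() => callPaymentProvider(), 'chargeCustomer', workflowID, functionID);
}
```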
@@ -1473,20 +1502,20 @@ class DBOSExecutor {
         this.logger.warn(`Error destroying wf queue runner: ${e.message}`);
     }
 }
-async executeWorkflowUUID(
-    const wfStatus = await this.systemDatabase.getWorkflowStatus(
-    const inputs = await this.systemDatabase.getWorkflowInputs(
+async executeWorkflowUUID(workflowID, startNewWorkflow = false) {
+    const wfStatus = await this.systemDatabase.getWorkflowStatus(workflowID);
+    const inputs = await this.systemDatabase.getWorkflowInputs(workflowID);
     if (!inputs || !wfStatus) {
-        this.logger.error(`Failed to find inputs for workflowUUID: ${
-        throw new error_1.DBOSError(`Failed to find inputs for workflow UUID: ${
+        this.logger.error(`Failed to find inputs for workflowUUID: ${workflowID}`);
+        throw new error_1.DBOSError(`Failed to find inputs for workflow UUID: ${workflowID}`);
     }
-    const parentCtx = this.#getRecoveryContext(
+    const parentCtx = this.#getRecoveryContext(workflowID, wfStatus);
     const { wfInfo, configuredInst } = this.getWorkflowInfoByStatus(wfStatus);
     // If starting a new workflow, assign a new UUID. Otherwise, use the workflow's original UUID.
-    const
+    const workflowStartID = startNewWorkflow ? undefined : workflowID;
     if (wfInfo) {
         return this.workflow(wfInfo.workflow, {
-            workflowUUID:
+            workflowUUID: workflowStartID,
             parentCtx: parentCtx,
             configuredInstance: configuredInst,
             queueName: wfStatus.queueName,
@@ -1500,17 +1529,17 @@ class DBOSExecutor {
     const nameArr = wfName.split('-');
     if (!nameArr[0].startsWith(DBOSExecutor.tempWorkflowName)) {
         // CB - Doesn't this happen if the user changed the function name in their code?
-        throw new error_1.DBOSError(`This should never happen! Cannot find workflow info for a non-temporary workflow! UUID ${
+        throw new error_1.DBOSError(`This should never happen! Cannot find workflow info for a non-temporary workflow! UUID ${workflowID}, name ${wfName}`);
     }
     let temp_workflow;
     if (nameArr[1] === exports.TempWorkflowType.transaction) {
         const { txnInfo, clsInst } = this.getTransactionInfoByNames(wfStatus.workflowClassName, nameArr[2], wfStatus.workflowConfigName);
         if (!txnInfo) {
-            this.logger.error(`Cannot find transaction info for UUID ${
+            this.logger.error(`Cannot find transaction info for UUID ${workflowID}, name ${nameArr[2]}`);
             throw new error_1.DBOSNotRegisteredError(nameArr[2]);
         }
         return await this.startTransactionTempWF(txnInfo.transaction, {
-            workflowUUID:
+            workflowUUID: workflowStartID,
             parentCtx: parentCtx ?? undefined,
             configuredInstance: clsInst,
             queueName: wfStatus.queueName,
@@ -1522,11 +1551,11 @@ class DBOSExecutor {
     else if (nameArr[1] === exports.TempWorkflowType.step) {
         const { commInfo, clsInst } = this.getStepInfoByNames(wfStatus.workflowClassName, nameArr[2], wfStatus.workflowConfigName);
         if (!commInfo) {
-            this.logger.error(`Cannot find step info for UUID ${
+            this.logger.error(`Cannot find step info for UUID ${workflowID}, name ${nameArr[2]}`);
             throw new error_1.DBOSNotRegisteredError(nameArr[2]);
         }
         return await this.startStepTempWF(commInfo.step, {
-            workflowUUID:
+            workflowUUID: workflowStartID,
             parentCtx: parentCtx ?? undefined,
             configuredInstance: clsInst,
             queueName: wfStatus.queueName, // Probably null
@@ -1540,7 +1569,7 @@ class DBOSExecutor {
         return await ctxt.send(args[0], args[1], args[2]); // id, value, topic
     };
     return this.workflow(temp_workflow, {
-        workflowUUID:
+        workflowUUID: workflowStartID,
         parentCtx: parentCtx ?? undefined,
         tempWfType: exports.TempWorkflowType.send,
         queueName: wfStatus.queueName,
@@ -1550,7 +1579,7 @@ class DBOSExecutor {
     ...inputs);
 }
 else {
-    this.logger.error(`Unrecognized temporary workflow! UUID ${
+    this.logger.error(`Unrecognized temporary workflow! UUID ${workflowID}, name ${wfName}`);
     throw new error_1.DBOSNotRegisteredError(wfName);
 }
 }
@@ -1575,8 +1604,20 @@ class DBOSExecutor {
     this.logger.info(`Cancelling workflow ${workflowID}`);
     this.workflowCancellationMap.set(workflowID, true);
 }
+async getWorkflowSteps(workflowID) {
+    const outputs = await this.systemDatabase.getAllOperationResults(workflowID);
+    return outputs.map((row) => {
+        return {
+            function_id: row.function_id,
+            function_name: row.function_name ?? '<unknown>',
+            child_workflow_id: row.child_workflow_id,
+            output: row.output !== null ? utils_1.DBOSJSON.parse(row.output) : null,
+            error: row.error !== null ? (0, serialize_error_1.deserializeError)(utils_1.DBOSJSON.parse(row.error)) : null,
+        };
+    });
+}
 async listWorkflowSteps(workflowID) {
-    const steps = await this.
+    const steps = await this.getWorkflowSteps(workflowID);
     const transactions = await this.getTransactions(workflowID);
     const merged = [...steps, ...transactions];
     merged.sort((a, b) => a.function_id - b.function_id);