@dbos-inc/dbos-sdk 2.1.5-preview.g539c9d794d → 2.1.12-preview

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. package/compose.yaml +19 -0
  2. package/dbos-config.schema.json +2 -11
  3. package/dist/src/context.d.ts +2 -0
  4. package/dist/src/context.d.ts.map +1 -1
  5. package/dist/src/context.js +16 -1
  6. package/dist/src/context.js.map +1 -1
  7. package/dist/src/dbos-executor.d.ts +9 -12
  8. package/dist/src/dbos-executor.d.ts.map +1 -1
  9. package/dist/src/dbos-executor.js +403 -90
  10. package/dist/src/dbos-executor.js.map +1 -1
  11. package/dist/src/dbos-runtime/cli.d.ts +1 -0
  12. package/dist/src/dbos-runtime/cli.d.ts.map +1 -1
  13. package/dist/src/dbos-runtime/cli.js +13 -2
  14. package/dist/src/dbos-runtime/cli.js.map +1 -1
  15. package/dist/src/dbos-runtime/config.d.ts +8 -7
  16. package/dist/src/dbos-runtime/config.d.ts.map +1 -1
  17. package/dist/src/dbos-runtime/config.js +26 -18
  18. package/dist/src/dbos-runtime/config.js.map +1 -1
  19. package/dist/src/dbos-runtime/db_connection.d.ts +10 -0
  20. package/dist/src/dbos-runtime/db_connection.d.ts.map +1 -0
  21. package/dist/src/dbos-runtime/db_connection.js +59 -0
  22. package/dist/src/dbos-runtime/db_connection.js.map +1 -0
  23. package/dist/src/dbos-runtime/db_wizard.d.ts.map +1 -1
  24. package/dist/src/dbos-runtime/db_wizard.js +10 -14
  25. package/dist/src/dbos-runtime/db_wizard.js.map +1 -1
  26. package/dist/src/dbos-runtime/migrate.d.ts.map +1 -1
  27. package/dist/src/dbos-runtime/migrate.js +2 -3
  28. package/dist/src/dbos-runtime/migrate.js.map +1 -1
  29. package/dist/src/dbos-runtime/reset.d.ts +4 -0
  30. package/dist/src/dbos-runtime/reset.d.ts.map +1 -0
  31. package/dist/src/dbos-runtime/reset.js +39 -0
  32. package/dist/src/dbos-runtime/reset.js.map +1 -0
  33. package/dist/src/dbos.d.ts +2 -0
  34. package/dist/src/dbos.d.ts.map +1 -1
  35. package/dist/src/dbos.js +50 -1
  36. package/dist/src/dbos.js.map +1 -1
  37. package/dist/src/error.d.ts +3 -0
  38. package/dist/src/error.d.ts.map +1 -1
  39. package/dist/src/error.js +10 -2
  40. package/dist/src/error.js.map +1 -1
  41. package/dist/src/eventreceiver.d.ts +2 -0
  42. package/dist/src/eventreceiver.d.ts.map +1 -1
  43. package/dist/src/httpServer/handler.js.map +1 -1
  44. package/dist/src/procedure.d.ts +3 -4
  45. package/dist/src/procedure.d.ts.map +1 -1
  46. package/dist/src/procedure.js +3 -1
  47. package/dist/src/procedure.js.map +1 -1
  48. package/dist/src/step.d.ts +1 -2
  49. package/dist/src/step.d.ts.map +1 -1
  50. package/dist/src/step.js.map +1 -1
  51. package/dist/src/system_database.d.ts.map +1 -1
  52. package/dist/src/system_database.js +31 -4
  53. package/dist/src/system_database.js.map +1 -1
  54. package/dist/src/testing/testing_runtime.js.map +1 -1
  55. package/dist/src/transaction.d.ts +1 -2
  56. package/dist/src/transaction.d.ts.map +1 -1
  57. package/dist/src/transaction.js.map +1 -1
  58. package/dist/src/utils.d.ts.map +1 -1
  59. package/dist/src/utils.js +1 -14
  60. package/dist/src/utils.js.map +1 -1
  61. package/dist/src/workflow.d.ts +1 -13
  62. package/dist/src/workflow.d.ts.map +1 -1
  63. package/dist/src/workflow.js +4 -322
  64. package/dist/src/workflow.js.map +1 -1
  65. package/dist/tsconfig.build.tsbuildinfo +1 -1
  66. package/package.json +1 -1
  67. package/dist/src/debugger/debug_workflow.d.ts +0 -56
  68. package/dist/src/debugger/debug_workflow.d.ts.map +0 -1
  69. package/dist/src/debugger/debug_workflow.js +0 -388
  70. package/dist/src/debugger/debug_workflow.js.map +0 -1
@@ -20,10 +20,10 @@ const decorators_1 = require("./decorators");
  const api_1 = require("@opentelemetry/api");
  const knex_1 = __importDefault(require("knex"));
  const context_1 = require("./context");
- const debug_workflow_1 = require("./debugger/debug_workflow");
  const serialize_error_1 = require("serialize-error");
  const utils_1 = require("./utils");
  const node_path_1 = __importDefault(require("node:path"));
+ const procedure_1 = require("./procedure");
  const lodash_1 = require("lodash");
  const wfqueue_1 = require("./wfqueue");
  const debugpoint_1 = require("./debugpoint");
@@ -279,7 +279,6 @@ class DBOSExecutor {
  }
  this.logger.debug(`Loaded ${length} ORM entities`);
  }
- await ((0, user_database_1.createDBIfDoesNotExist)(this.config.poolConfig, this.logger));
  this.configureDbClient();
  if (!this.userDatabase) {
  this.logger.error("No user database configured!");
@@ -354,24 +353,6 @@ class DBOSExecutor {
  this.logger.error(`Unknown notice severity: ${msg.severity} - ${msg.message}`);
  }
  }
- async callProcedure(proc, args) {
- const client = await this.procedurePool.connect();
- const log = (msg) => this.#logNotice(msg);
- const procClassName = this.getProcedureClassName(proc);
- const plainProcName = `${procClassName}_${proc.name}_p`;
- const procName = this.config.appVersion
- ? `v${this.config.appVersion}_${plainProcName}`
- : plainProcName;
- const sql = `CALL "${procName}"(${args.map((_v, i) => `$${i + 1}`).join()});`;
- try {
- client.on('notice', log);
- return await client.query(sql, args).then(value => value.rows);
- }
- finally {
- client.off('notice', log);
- client.release();
- }
- }
  async destroy() {
  if (this.pendingWorkflowMap.size > 0) {
  this.logger.info("Waiting for pending workflows to finish.");
@@ -501,9 +482,6 @@ class DBOSExecutor {
  }
  // TODO: getProcedureInfoByNames??
  async workflow(wf, params, ...args) {
- if (this.debugMode) {
- return this.debugWorkflow(wf, params, undefined, undefined, ...args);
- }
  return this.internalWorkflow(wf, params, undefined, undefined, ...args);
  }
  // If callerUUID and functionID are set, it means the workflow is invoked from within a workflow.
@@ -547,35 +525,57 @@ class DBOSExecutor {
  if ((wCtxt.tempWfOperationType !== TempWorkflowType.transaction
  && wCtxt.tempWfOperationType !== TempWorkflowType.procedure)
  || params.queueName !== undefined) {
- // TODO: Make this transactional (and with the queue step below)
- const ires = await this.systemDatabase.initWorkflowStatus(internalStatus, args);
- args = ires.args;
- status = ires.status;
- await (0, debugpoint_1.debugTriggerPoint)(debugpoint_1.DEBUG_TRIGGER_WORKFLOW_ENQUEUE);
+ if (this.debugMode) {
+ const wfStatus = await this.systemDatabase.getWorkflowStatus(workflowUUID);
+ const wfInputs = await this.systemDatabase.getWorkflowInputs(workflowUUID);
+ if (!wfStatus || !wfInputs) {
+ throw new error_1.DBOSDebuggerError(`Failed to find inputs for workflow UUID ${workflowUUID}`);
+ }
+ // Make sure we use the same input.
+ if (utils_1.DBOSJSON.stringify(args) !== utils_1.DBOSJSON.stringify(wfInputs)) {
+ throw new error_1.DBOSDebuggerError(`Detected different inputs for workflow UUID ${workflowUUID}.\n Received: ${utils_1.DBOSJSON.stringify(args)}\n Original: ${utils_1.DBOSJSON.stringify(wfInputs)}`);
+ }
+ status = wfStatus.status;
+ }
+ else {
+ // TODO: Make this transactional (and with the queue step below)
+ const ires = await this.systemDatabase.initWorkflowStatus(internalStatus, args);
+ args = ires.args;
+ status = ires.status;
+ await (0, debugpoint_1.debugTriggerPoint)(debugpoint_1.DEBUG_TRIGGER_WORKFLOW_ENQUEUE);
+ }
  }
  const runWorkflow = async () => {
  let result;
  // Execute the workflow.
  try {
- let cresult;
- await (0, context_1.runWithWorkflowContext)(wCtxt, async () => {
- if (passContext) {
- cresult = await wf.call(params.configuredInstance, wCtxt, ...args);
- }
- else {
- cresult = await wf.call(params.configuredInstance, ...args);
- }
+ const callResult = await (0, context_1.runWithWorkflowContext)(wCtxt, async () => {
+ const callPromise = passContext
+ ? wf.call(params.configuredInstance, wCtxt, ...args)
+ : wf.call(params.configuredInstance, ...args);
+ return await callPromise;
  });
- result = cresult;
+ if (this.debugMode) {
+ const recordedResult = await this.systemDatabase.getWorkflowResult(workflowUUID);
+ if (utils_1.DBOSJSON.stringify(callResult) !== utils_1.DBOSJSON.stringify(recordedResult)) {
+ this.logger.error(`Detect different output for the workflow UUID ${workflowUUID}!\n Received: ${utils_1.DBOSJSON.stringify(callResult)}\n Original: ${utils_1.DBOSJSON.stringify(recordedResult)}`);
+ }
+ result = recordedResult;
+ }
+ else {
+ result = callResult;
+ }
  internalStatus.output = result;
  internalStatus.status = workflow_1.StatusString.SUCCESS;
- if (internalStatus.queueName) {
+ if (internalStatus.queueName && !this.debugMode) {
  // Now... the workflow isn't certainly done.
  // But waiting this long is for concurrency control anyway,
  // so it is probably done enough.
  await this.systemDatabase.dequeueWorkflow(workflowUUID, this.#getQueueByName(internalStatus.queueName));
  }
- this.systemDatabase.bufferWorkflowOutput(workflowUUID, internalStatus);
+ if (!this.debugMode) {
+ this.systemDatabase.bufferWorkflowOutput(workflowUUID, internalStatus);
+ }
  wCtxt.span.setStatus({ code: api_1.SpanStatusCode.OK });
  }
  catch (err) {
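The hunk above folds the old debugWorkflow path into internalWorkflow: in debug mode the executor replays a previously recorded execution, verifying that the supplied inputs match the recorded inputs and that the recomputed output matches the recorded result. A minimal TypeScript sketch of that replay-verification pattern, using hypothetical helper names rather than the SDK's own API:

    // Sketch of the debug-replay check pattern shown above (hypothetical helper, not the published SDK API).
    type Recorded<T> = { inputs: unknown[]; output: T };

    async function replayAndVerify<T>(
      workflowUUID: string,
      args: unknown[],
      run: (...args: unknown[]) => Promise<T>,
      load: (uuid: string) => Promise<Recorded<T> | undefined>,
    ): Promise<T> {
      const recorded = await load(workflowUUID);
      if (!recorded) {
        throw new Error(`Failed to find inputs for workflow UUID ${workflowUUID}`);
      }
      // Compare serialized inputs, mirroring the DBOSJSON.stringify check above.
      if (JSON.stringify(args) !== JSON.stringify(recorded.inputs)) {
        throw new Error(`Detected different inputs for workflow UUID ${workflowUUID}`);
      }
      const output = await run(...args);
      if (JSON.stringify(output) !== JSON.stringify(recorded.output)) {
        console.error(`Detected different output for workflow UUID ${workflowUUID}`);
      }
      return recorded.output; // In debug mode the recorded result wins.
    }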
@@ -596,10 +596,12 @@ class DBOSExecutor {
  }
  internalStatus.error = utils_1.DBOSJSON.stringify((0, serialize_error_1.serializeError)(e));
  internalStatus.status = workflow_1.StatusString.ERROR;
- if (internalStatus.queueName) {
+ if (internalStatus.queueName && !this.debugMode) {
  await this.systemDatabase.dequeueWorkflow(workflowUUID, this.#getQueueByName(internalStatus.queueName));
  }
- await this.systemDatabase.recordWorkflowError(workflowUUID, internalStatus);
+ if (!this.debugMode) {
+ await this.systemDatabase.recordWorkflowError(workflowUUID, internalStatus);
+ }
  // TODO: Log errors, but not in the tests when they're expected.
  wCtxt.span.setStatus({ code: api_1.SpanStatusCode.ERROR, message: e.message });
  throw err;
@@ -611,7 +613,9 @@ class DBOSExecutor {
  || wCtxt.tempWfOperationType === TempWorkflowType.procedure) {
  // For single-transaction workflows, asynchronously record inputs.
  // We must buffer inputs after workflow status is buffered/flushed because workflow_inputs table has a foreign key reference to the workflow_status table.
- this.systemDatabase.bufferWorkflowInputs(workflowUUID, args);
+ if (!this.debugMode) {
+ this.systemDatabase.bufferWorkflowInputs(workflowUUID, args);
+ }
  }
  }
  // Asynchronously flush the result buffer.
@@ -620,7 +624,7 @@ class DBOSExecutor {
  }
  return result;
  };
- if (status !== 'SUCCESS' && status !== 'ERROR' && (params.queueName === undefined || params.executeWorkflow)) {
+ if (this.debugMode || (status !== 'SUCCESS' && status !== 'ERROR' && (params.queueName === undefined || params.executeWorkflow))) {
  const workflowPromise = runWorkflow();
  // Need to await for the workflow and capture errors.
  const awaitWorkflowPromise = workflowPromise
@@ -636,7 +640,7 @@ class DBOSExecutor {
  return new workflow_1.InvokedHandle(this.systemDatabase, workflowPromise, workflowUUID, wf.name, callerUUID, callerFunctionID);
  }
  else {
- if (params.queueName && status === 'ENQUEUED') {
+ if (params.queueName && status === 'ENQUEUED' && !this.debugMode) {
  await this.systemDatabase.enqueueWorkflow(workflowUUID, this.#getQueueByName(params.queueName));
  }
  return new workflow_1.RetrievedHandle(this.systemDatabase, workflowUUID, callerUUID, callerFunctionID);
@@ -649,45 +653,135 @@ class DBOSExecutor {
  return q;
  }
  /**
- * DEBUG MODE workflow execution, skipping all the recording
+ * Retrieve the transaction snapshot information of the current transaction
+ */
+ static async #retrieveSnapshot(query) {
+ const rows = await query("SELECT pg_current_snapshot()::text as txn_snapshot;", []);
+ return rows[0].txn_snapshot;
+ }
+ /**
+ * Check if an operation has already executed in a workflow.
+ * If it previously executed successfully, return its output.
+ * If it previously executed and threw an error, throw that error.
+ * Otherwise, return DBOSNull.
+ * Also return the transaction snapshot information of this current transaction.
  */
- async debugWorkflow(wf, params, callerUUID, callerFunctionID, ...args) {
- // In debug mode, we must have a specific workflow UUID.
- if (!params.workflowUUID) {
- throw new error_1.DBOSDebuggerError("Workflow UUID not found!");
+ async #checkExecution(query, workflowUUID, funcID) {
+ // Note: we read the current snapshot, not the recorded one!
+ const rows = await query("(SELECT output, error, txn_snapshot, true as recorded FROM dbos.transaction_outputs WHERE workflow_uuid=$1 AND function_id=$2 UNION ALL SELECT null as output, null as error, pg_current_snapshot()::text as txn_snapshot, false as recorded) ORDER BY recorded", [workflowUUID, funcID]);
+ if (rows.length === 0 || rows.length > 2) {
+ this.logger.error("Unexpected! This should never happen. Returned rows: " + rows.toString());
+ throw new error_1.DBOSError("This should never happen. Returned rows: " + rows.toString());
+ }
+ const res = {
+ output: exports.dbosNull,
+ txn_snapshot: ""
+ };
+ // recorded=false row will be first because we used ORDER BY.
+ res.txn_snapshot = rows[0].txn_snapshot;
+ if (rows.length === 2) {
+ if (utils_1.DBOSJSON.parse(rows[1].error) !== null) {
+ throw (0, serialize_error_1.deserializeError)(utils_1.DBOSJSON.parse(rows[1].error));
+ }
+ else {
+ res.output = utils_1.DBOSJSON.parse(rows[1].output);
+ }
  }
- const workflowUUID = params.workflowUUID;
- const wInfo = this.getWorkflowInfo(wf);
- if (wInfo === undefined) {
- throw new error_1.DBOSDebuggerError("Workflow unregistered! " + wf.name);
+ return res;
+ }
+ /**
+ * Write a operation's output to the database.
+ */
+ async #recordOutput(query, workflowUUID, funcID, txnSnapshot, output, isKeyConflict) {
+ if (this.debugMode) {
+ throw new error_1.DBOSDebuggerError("Cannot record output in debug mode.");
  }
- const wConfig = wInfo.config;
- const wCtxt = new debug_workflow_1.WorkflowContextDebug(this, params.parentCtx, workflowUUID, wConfig, wf.name);
- // A workflow must have run before.
- const wfStatus = await this.systemDatabase.getWorkflowStatus(workflowUUID);
- const recordedInputs = await this.systemDatabase.getWorkflowInputs(workflowUUID);
- if (!wfStatus || !recordedInputs) {
- throw new error_1.DBOSDebuggerError("Workflow status or inputs not found! UUID: " + workflowUUID);
- }
- // Make sure we use the same input.
- if (utils_1.DBOSJSON.stringify(args) !== utils_1.DBOSJSON.stringify(recordedInputs)) {
- throw new error_1.DBOSDebuggerError(`Detect different input for the workflow UUID ${workflowUUID}!\n Received: ${utils_1.DBOSJSON.stringify(args)}\n Original: ${utils_1.DBOSJSON.stringify(recordedInputs)}`);
- }
- const workflowPromise = (0, context_1.runWithWorkflowContext)(wCtxt, async () => {
- return await wf.call(params.configuredInstance, wCtxt, ...args)
- .then(async (result) => {
- // Check if the result is the same.
- const recordedResult = await this.systemDatabase.getWorkflowResult(workflowUUID);
- if (result === undefined && !recordedResult) {
- return result;
- }
- if (utils_1.DBOSJSON.stringify(result) !== utils_1.DBOSJSON.stringify(recordedResult)) {
- this.logger.error(`Detect different output for the workflow UUID ${workflowUUID}!\n Received: ${utils_1.DBOSJSON.stringify(result)}\n Original: ${utils_1.DBOSJSON.stringify(recordedResult)}`);
+ try {
+ const serialOutput = utils_1.DBOSJSON.stringify(output);
+ const rows = await query("INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, output, txn_id, txn_snapshot, created_at) VALUES ($1, $2, $3, (select pg_current_xact_id_if_assigned()::text), $4, $5) RETURNING txn_id;", [workflowUUID, funcID, serialOutput, txnSnapshot, Date.now()]);
+ return rows[0].txn_id;
+ }
+ catch (error) {
+ if (isKeyConflict(error)) {
+ // Serialization and primary key conflict (Postgres).
+ throw new error_1.DBOSWorkflowConflictUUIDError(workflowUUID);
+ }
+ else {
+ throw error;
+ }
+ }
+ }
+ /**
+ * Record an error in an operation to the database.
+ */
+ async #recordError(query, workflowUUID, funcID, txnSnapshot, err, isKeyConflict) {
+ if (this.debugMode) {
+ throw new error_1.DBOSDebuggerError("Cannot record error in debug mode.");
+ }
+ try {
+ const serialErr = utils_1.DBOSJSON.stringify((0, serialize_error_1.serializeError)(err));
+ await query("INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, error, txn_id, txn_snapshot, created_at) VALUES ($1, $2, $3, null, $4, $5) RETURNING txn_id;", [workflowUUID, funcID, serialErr, txnSnapshot, Date.now()]);
+ }
+ catch (error) {
+ if (isKeyConflict(error)) {
+ // Serialization and primary key conflict (Postgres).
+ throw new error_1.DBOSWorkflowConflictUUIDError(workflowUUID);
+ }
+ else {
+ throw error;
+ }
+ }
+ }
+ /**
+ * Write all entries in the workflow result buffer to the database.
+ * If it encounters a primary key error, this indicates a concurrent execution with the same UUID, so throw an DBOSError.
+ */
+ async #flushResultBuffer(query, resultBuffer, workflowUUID, isKeyConflict) {
+ const funcIDs = Array.from(resultBuffer.keys());
+ if (funcIDs.length === 0) {
+ return;
+ }
+ if (this.debugMode) {
+ throw new error_1.DBOSDebuggerError("Cannot flush result buffer in debug mode.");
+ }
+ funcIDs.sort();
+ try {
+ let sqlStmt = "INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, output, error, txn_id, txn_snapshot, created_at) VALUES ";
+ let paramCnt = 1;
+ const values = [];
+ for (const funcID of funcIDs) {
+ // Capture output and also transaction snapshot information.
+ // Initially, no txn_id because no queries executed.
+ const recorded = resultBuffer.get(funcID);
+ const output = recorded.output;
+ const txnSnapshot = recorded.txn_snapshot;
+ const createdAt = recorded.created_at;
+ if (paramCnt > 1) {
+ sqlStmt += ", ";
  }
- return recordedResult; // Always return the recorded result.
- });
- });
- return new workflow_1.InvokedHandle(this.systemDatabase, workflowPromise, workflowUUID, wf.name, callerUUID, callerFunctionID);
+ sqlStmt += `($${paramCnt++}, $${paramCnt++}, $${paramCnt++}, $${paramCnt++}, null, $${paramCnt++}, $${paramCnt++})`;
+ values.push(workflowUUID, funcID, utils_1.DBOSJSON.stringify(output), utils_1.DBOSJSON.stringify(null), txnSnapshot, createdAt);
+ }
+ this.logger.debug(sqlStmt);
+ await query(sqlStmt, values);
+ }
+ catch (error) {
+ if (isKeyConflict(error)) {
+ // Serialization and primary key conflict (Postgres).
+ throw new error_1.DBOSWorkflowConflictUUIDError(workflowUUID);
+ }
+ else {
+ throw error;
+ }
+ }
+ }
+ flushResultBuffer(client, resultBuffer, workflowUUID) {
+ const func = (sql, args) => this.userDatabase.queryWithClient(client, sql, ...args);
+ return this.#flushResultBuffer(func, resultBuffer, workflowUUID, (error) => this.userDatabase.isKeyConflictError(error));
+ }
+ #flushResultBufferProc(client, resultBuffer, workflowUUID) {
+ const func = (sql, args) => client.query(sql, args).then(v => v.rows);
+ return this.#flushResultBuffer(func, resultBuffer, workflowUUID, user_database_1.pgNodeIsKeyConflictError);
  }
  async transaction(txn, params, ...args) {
  // Create a workflow and call transaction.
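The helpers added above (#checkExecution, #recordOutput, #recordError, #flushResultBuffer) all take a query callback, so the same logic can run against either the user database client or the procedure pool. A simplified sketch of the underlying check-then-record idempotency pattern, assuming node-pg and a reduced transaction_outputs schema (names are illustrative, not the SDK's API):

    // Sketch of check-then-record against a transaction_outputs-style table, assuming node-pg ("pg").
    import { Pool } from "pg";

    type QueryFn = (sql: string, params: unknown[]) => Promise<any[]>;

    const pool = new Pool(); // connection settings come from the environment
    const query: QueryFn = (sql, params) => pool.query(sql, params).then((r) => r.rows);

    async function checkOrRecord(
      workflowUUID: string,
      funcID: number,
      compute: () => Promise<unknown>,
    ): Promise<unknown> {
      // 1. Return the recorded output if this function already ran.
      const rows = await query(
        "SELECT output FROM dbos.transaction_outputs WHERE workflow_uuid=$1 AND function_id=$2",
        [workflowUUID, funcID],
      );
      if (rows.length > 0) {
        return JSON.parse(rows[0].output);
      }
      // 2. Otherwise compute and record it; a primary-key conflict here indicates a
      //    concurrent execution with the same UUID already recorded this step.
      const output = await compute();
      await query(
        "INSERT INTO dbos.transaction_outputs (workflow_uuid, function_id, output, created_at) VALUES ($1, $2, $3, $4)",
        [workflowUUID, funcID, JSON.stringify(output), Date.now()],
      );
      return output;
    }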
@@ -729,7 +823,8 @@ class DBOSExecutor {
  // If the UUID is preset, it is possible this execution previously happened. Check, and return its original result if it did.
  // Note: It is possible to retrieve a generated ID from a workflow handle, run a concurrent execution, and cause trouble for yourself. We recommend against this.
  if (wfCtx.presetUUID) {
- const check = await wfCtx.checkTxExecution(client, funcId);
+ const func = (sql, args) => this.userDatabase.queryWithClient(client, sql, ...args);
+ const check = await this.#checkExecution(func, workflowUUID, funcId);
  txn_snapshot = check.txn_snapshot;
  if (check.output !== exports.dbosNull) {
  tCtxt.span.setAttribute("cached", true);
@@ -740,11 +835,15 @@ class DBOSExecutor {
  }
  else {
  // Collect snapshot information for read-only transactions and non-preset UUID transactions, if not already collected above
- txn_snapshot = await wfCtx.retrieveTxSnapshot(client);
+ const func = (sql, args) => this.userDatabase.queryWithClient(client, sql, ...args);
+ txn_snapshot = await DBOSExecutor.#retrieveSnapshot(func);
+ }
+ if (this.debugMode) {
+ throw new error_1.DBOSDebuggerError(`Failed to find inputs for workflow UUID ${workflowUUID}`);
  }
  // For non-read-only transactions, flush the result buffer.
  if (!readOnly) {
- await wfCtx.flushResultBuffer(client);
+ await this.flushResultBuffer(client, wfCtx.resultBuffer, wfCtx.workflowUUID);
  }
  // Execute the user's transaction.
  let cresult;
@@ -773,7 +872,8 @@ class DBOSExecutor {
  else {
  try {
  // Synchronously record the output of write transactions and obtain the transaction ID.
- const pg_txn_id = await wfCtx.recordOutputTx(client, funcId, txn_snapshot, result);
+ const func = (sql, args) => this.userDatabase.queryWithClient(client, sql, ...args);
+ const pg_txn_id = await this.#recordOutput(func, wfCtx.workflowUUID, funcId, txn_snapshot, result, (error) => this.userDatabase.isKeyConflictError(error));
  tCtxt.span.setAttribute("pg_txn_id", pg_txn_id);
  wfCtx.resultBuffer.clear();
  }
@@ -796,6 +896,9 @@ class DBOSExecutor {
  return result;
  }
  catch (err) {
+ if (this.debugMode) {
+ throw err;
+ }
  if (this.userDatabase.isRetriableTransactionError(err)) {
  // serialization_failure in PostgreSQL
  span.addEvent("TXN SERIALIZATION FAILURE", { "retryWaitMillis": retryWaitMillis }, performance.now());
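As in #callProcedureFunctionLocal later in this file, retriable serialization failures are retried with exponential backoff (starting at 1 ms, factor 1.5, capped at 2 seconds). A generic sketch of that retry loop, with an assumed isRetriable predicate rather than the SDK's userDatabase check:

    // Generic sketch of the backoff loop used for retriable transaction errors.
    const sleepms = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

    async function withSerializationRetry<T>(
      attempt: () => Promise<T>,
      isRetriable: (err: unknown) => boolean,
    ): Promise<T> {
      let retryWaitMillis = 1;
      const backoffFactor = 1.5;
      const maxRetryWaitMs = 2000; // Maximum wait 2 seconds.
      while (true) {
        try {
          return await attempt();
        } catch (err) {
          if (!isRetriable(err)) throw err;
          await sleepms(retryWaitMillis);
          retryWaitMillis = Math.min(retryWaitMillis * backoffFactor, maxRetryWaitMs);
        }
      }
    }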
@@ -808,8 +911,9 @@ class DBOSExecutor {
  // Record and throw other errors.
  const e = err;
  await this.userDatabase.transaction(async (client) => {
- await wfCtx.flushResultBuffer(client);
- await wfCtx.recordErrorTx(client, funcId, txn_snapshot, e);
+ await this.flushResultBuffer(client, wfCtx.resultBuffer, wfCtx.workflowUUID);
+ const func = (sql, args) => this.userDatabase.queryWithClient(client, sql, ...args);
+ await this.#recordError(func, wfCtx.workflowUUID, funcId, txn_snapshot, e, (error) => this.userDatabase.isKeyConflictError(error));
  }, { isolationLevel: transaction_1.IsolationLevel.ReadCommitted });
  wfCtx.resultBuffer.clear();
  span.setStatus({ code: api_1.SpanStatusCode.ERROR, message: e.message });
@@ -822,15 +926,215 @@ class DBOSExecutor {
  // Create a workflow and call procedure.
  const temp_workflow = async (ctxt, ...args) => {
  const ctxtImpl = ctxt;
- return await ctxtImpl.procedure(proc, ...args);
+ return this.callProcedureFunction(proc, ctxtImpl, ...args);
  };
- return (await this.workflow(temp_workflow, { ...params,
+ return await (await this.workflow(temp_workflow, {
+ ...params,
  tempWfType: TempWorkflowType.procedure,
  tempWfName: (0, decorators_1.getRegisteredMethodName)(proc),
  tempWfClass: (0, decorators_1.getRegisteredMethodClassName)(proc),
  }, ...args)).getResult();
  }
- async executeProcedure(func, config) {
+ async callProcedureFunction(proc, wfCtx, ...args) {
+ const procInfo = this.getProcedureInfo(proc);
+ if (procInfo === undefined) {
+ throw new error_1.DBOSNotRegisteredError(proc.name);
+ }
+ const executeLocally = this.debugMode || (procInfo.config.executeLocally ?? false);
+ const funcId = wfCtx.functionIDGetIncrement();
+ const span = this.tracer.startSpan(proc.name, {
+ operationUUID: wfCtx.workflowUUID,
+ operationType: exports.OperationType.PROCEDURE,
+ authenticatedUser: wfCtx.authenticatedUser,
+ assumedRole: wfCtx.assumedRole,
+ authenticatedRoles: wfCtx.authenticatedRoles,
+ readOnly: procInfo.config.readOnly ?? false,
+ isolationLevel: procInfo.config.isolationLevel,
+ executeLocally,
+ }, wfCtx.span);
+ try {
+ const result = executeLocally
+ ? await this.#callProcedureFunctionLocal(proc, args, wfCtx, span, procInfo, funcId)
+ : await this.#callProcedureFunctionRemote(proc, args, wfCtx, span, procInfo.config, funcId);
+ span.setStatus({ code: api_1.SpanStatusCode.OK });
+ return result;
+ }
+ catch (e) {
+ const { message } = e;
+ span.setStatus({ code: api_1.SpanStatusCode.ERROR, message });
+ throw e;
+ }
+ finally {
+ this.tracer.endSpan(span);
+ }
+ }
+ async #callProcedureFunctionLocal(proc, args, wfCtx, span, procInfo, funcId) {
+ let retryWaitMillis = 1;
+ const backoffFactor = 1.5;
+ const maxRetryWaitMs = 2000; // Maximum wait 2 seconds.
+ const readOnly = procInfo.config.readOnly ?? false;
+ while (true) {
+ let txn_snapshot = "invalid";
+ const wrappedProcedure = async (client) => {
+ const ctxt = new procedure_1.StoredProcedureContextImpl(client, wfCtx, span, this.logger, funcId, proc.name);
+ if (wfCtx.presetUUID) {
+ const func = (sql, args) => this.procedurePool.query(sql, args).then(v => v.rows);
+ const check = await this.#checkExecution(func, wfCtx.workflowUUID, funcId);
+ txn_snapshot = check.txn_snapshot;
+ if (check.output !== exports.dbosNull) {
+ ctxt.span.setAttribute("cached", true);
+ ctxt.span.setStatus({ code: api_1.SpanStatusCode.OK });
+ this.tracer.endSpan(ctxt.span);
+ return check.output;
+ }
+ }
+ else {
+ // Collect snapshot information for read-only transactions and non-preset UUID transactions, if not already collected above
+ const func = (sql, args) => this.procedurePool.query(sql, args).then(v => v.rows);
+ txn_snapshot = await DBOSExecutor.#retrieveSnapshot(func);
+ }
+ if (this.debugMode) {
+ throw new error_1.DBOSDebuggerError(`Failed to find inputs for workflow UUID ${wfCtx.workflowUUID}`);
+ }
+ // For non-read-only transactions, flush the result buffer.
+ if (!readOnly) {
+ await this.#flushResultBufferProc(client, wfCtx.resultBuffer, wfCtx.workflowUUID);
+ }
+ let cresult;
+ if (procInfo.registration.passContext) {
+ await (0, context_1.runWithStoredProcContext)(ctxt, async () => {
+ cresult = await proc(ctxt, ...args);
+ });
+ }
+ else {
+ await (0, context_1.runWithStoredProcContext)(ctxt, async () => {
+ const pf = proc;
+ cresult = await pf(...args);
+ });
+ }
+ const result = cresult;
+ if (readOnly) {
+ // Buffer the output of read-only transactions instead of synchronously writing it.
+ const readOutput = {
+ output: result,
+ txn_snapshot: txn_snapshot,
+ created_at: Date.now(),
+ };
+ wfCtx.resultBuffer.set(funcId, readOutput);
+ }
+ else {
+ // Synchronously record the output of write transactions and obtain the transaction ID.
+ const func = (sql, args) => client.query(sql, args).then(v => v.rows);
+ const pg_txn_id = await this.#recordOutput(func, wfCtx.workflowUUID, funcId, txn_snapshot, result, user_database_1.pgNodeIsKeyConflictError);
+ // const pg_txn_id = await wfCtx.recordOutputProc<R>(client, funcId, txn_snapshot, result);
+ ctxt.span.setAttribute("pg_txn_id", pg_txn_id);
+ wfCtx.resultBuffer.clear();
+ }
+ return result;
+ };
+ try {
+ const result = await this.invokeStoredProcFunction(wrappedProcedure, { isolationLevel: procInfo.config.isolationLevel });
+ span.setStatus({ code: api_1.SpanStatusCode.OK });
+ return result;
+ }
+ catch (err) {
+ if (this.userDatabase.isRetriableTransactionError(err)) {
+ // serialization_failure in PostgreSQL
+ span.addEvent("TXN SERIALIZATION FAILURE", { "retryWaitMillis": retryWaitMillis }, performance.now());
+ // Retry serialization failures.
+ await (0, utils_1.sleepms)(retryWaitMillis);
+ retryWaitMillis *= backoffFactor;
+ retryWaitMillis = retryWaitMillis < maxRetryWaitMs ? retryWaitMillis : maxRetryWaitMs;
+ continue;
+ }
+ // Record and throw other errors.
+ const e = err;
+ await this.invokeStoredProcFunction(async (client) => {
+ await this.#flushResultBufferProc(client, wfCtx.resultBuffer, wfCtx.workflowUUID);
+ const func = (sql, args) => client.query(sql, args).then(v => v.rows);
+ await this.#recordError(func, wfCtx.workflowUUID, funcId, txn_snapshot, e, user_database_1.pgNodeIsKeyConflictError);
+ }, { isolationLevel: transaction_1.IsolationLevel.ReadCommitted });
+ await this.userDatabase.transaction(async (client) => {
+ await this.flushResultBuffer(client, wfCtx.resultBuffer, wfCtx.workflowUUID);
+ const func = (sql, args) => this.userDatabase.queryWithClient(client, sql, ...args);
+ await this.#recordError(func, wfCtx.workflowUUID, funcId, txn_snapshot, e, (error) => this.userDatabase.isKeyConflictError(error));
+ }, { isolationLevel: transaction_1.IsolationLevel.ReadCommitted });
+ wfCtx.resultBuffer.clear();
+ throw err;
+ }
+ }
+ }
+ async #callProcedureFunctionRemote(proc, args, wfCtx, span, config, funcId) {
+ if (this.debugMode) {
+ throw new error_1.DBOSDebuggerError("Can't invoke stored procedure in debug mode.");
+ }
+ const readOnly = config.readOnly ?? false;
+ const $jsonCtx = {
+ request: wfCtx.request,
+ authenticatedUser: wfCtx.authenticatedUser,
+ authenticatedRoles: wfCtx.authenticatedRoles,
+ assumedRole: wfCtx.assumedRole,
+ };
+ // Note, node-pg converts JS arrays to postgres array literals, so must call JSON.strigify on
+ // args and bufferedResults before being passed to #invokeStoredProc
+ const $args = [wfCtx.workflowUUID, funcId, wfCtx.presetUUID, $jsonCtx, null, JSON.stringify(args)];
+ if (!readOnly) {
+ // function_id, output, txn_snapshot, created_at
+ const bufferedResults = new Array();
+ for (const [functionID, { output, txn_snapshot, created_at }] of wfCtx.resultBuffer.entries()) {
+ bufferedResults.push([functionID, output, txn_snapshot, created_at]);
+ }
+ // sort by function ID
+ bufferedResults.sort((a, b) => a[0] - b[0]);
+ $args.unshift(bufferedResults.length > 0 ? JSON.stringify(bufferedResults) : null);
+ }
+ const [{ return_value }] = await this.#invokeStoredProc(proc, $args);
+ const { error, output, txn_snapshot, txn_id, created_at } = return_value;
+ // buffered results are persisted in r/w stored procs, even if it returns an error
+ if (!readOnly) {
+ wfCtx.resultBuffer.clear();
+ }
+ // if the stored proc returns an error, deserialize and throw it.
+ // stored proc saves the error in tx_output before returning
+ if (error) {
+ throw (0, serialize_error_1.deserializeError)(error);
+ }
+ // if txn_snapshot is provided, the output needs to be buffered
+ if (readOnly && txn_snapshot) {
+ wfCtx.resultBuffer.set(funcId, {
+ output,
+ txn_snapshot,
+ created_at: created_at ?? Date.now(),
+ });
+ }
+ if (!readOnly) {
+ wfCtx.resultBuffer.clear();
+ }
+ if (txn_id) {
+ span.setAttribute("pg_txn_id", txn_id);
+ }
+ span.setStatus({ code: api_1.SpanStatusCode.OK });
+ return output;
+ }
+ async #invokeStoredProc(proc, args) {
+ const client = await this.procedurePool.connect();
+ const log = (msg) => this.#logNotice(msg);
+ const procClassName = this.getProcedureClassName(proc);
+ const plainProcName = `${procClassName}_${proc.name}_p`;
+ const procName = this.config.appVersion
+ ? `v${this.config.appVersion}_${plainProcName}`
+ : plainProcName;
+ const sql = `CALL "${procName}"(${args.map((_v, i) => `$${i + 1}`).join()});`;
+ try {
+ client.on('notice', log);
+ return await client.query(sql, args).then(value => value.rows);
+ }
+ finally {
+ client.off('notice', log);
+ client.release();
+ }
+ }
+ async invokeStoredProcFunction(func, config) {
  const client = await this.procedurePool.connect();
  try {
  const readOnly = config.readOnly ?? false;
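The #invokeStoredProc method added above builds a version-prefixed procedure name and issues a parameterized CALL through node-pg while forwarding notices to the logger. A standalone sketch of that call pattern, assuming node-pg and an already-deployed procedure (names and the console logger are illustrative):

    // Standalone sketch of calling a versioned stored procedure with node-pg.
    import { Pool, PoolClient } from "pg";

    const procedurePool = new Pool();

    async function callVersionedProc(procName: string, appVersion: string | undefined, args: unknown[]) {
      const versionedName = appVersion ? `v${appVersion}_${procName}` : procName;
      const placeholders = args.map((_v, i) => `$${i + 1}`).join();
      const sql = `CALL "${versionedName}"(${placeholders});`;
      const client: PoolClient = await procedurePool.connect();
      const onNotice = (msg: unknown) => console.log("notice:", msg);
      try {
        client.on("notice", onNotice);
        const result = await client.query(sql, args);
        return result.rows;
      } finally {
        client.off("notice", onNotice);
        client.release();
      }
    }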
@@ -889,7 +1193,7 @@ class DBOSExecutor {
  }, wfCtx.span);
  const ctxt = new step_1.StepContextImpl(wfCtx, funcID, span, this.logger, commInfo.config, stepFn.name);
  await this.userDatabase.transaction(async (client) => {
- await wfCtx.flushResultBuffer(client);
+ await this.flushResultBuffer(client, wfCtx.resultBuffer, wfCtx.workflowUUID);
  }, { isolationLevel: transaction_1.IsolationLevel.ReadCommitted });
  wfCtx.resultBuffer.clear();
  // Check if this execution previously happened, returning its original result if it did.
@@ -900,6 +1204,9 @@ class DBOSExecutor {
  this.tracer.endSpan(ctxt.span);
  return check;
  }
+ if (this.debugMode) {
+ throw new error_1.DBOSDebuggerError(`Failed to find recorded output for workflow UUID: ${wfCtx.workflowUUID}`);
+ }
  // Execute the step function. If it throws an exception, retry with exponential backoff.
  // After reaching the maximum number of retries, throw an DBOSError.
  let result = exports.dbosNull;
@@ -1045,6 +1352,9 @@ class DBOSExecutor {
  * It runs to completion all pending workflows that were executing when the previous executor failed.
  */
  async recoverPendingWorkflows(executorIDs = ["local"]) {
+ if (this.debugMode) {
+ throw new error_1.DBOSDebuggerError("Cannot recover pending workflows in debug mode.");
+ }
  const pendingWorkflows = [];
  for (const execID of executorIDs) {
  if (execID === "local" && process.env.DBOS__VMID) {
@@ -1196,13 +1506,16 @@ class DBOSExecutor {
  * Periodically flush the workflow output buffer to the system database.
  */
  async flushWorkflowBuffers() {
- if (this.initialized) {
+ if (this.initialized && !this.debugMode) {
  await this.flushWorkflowResultBuffer();
  await this.systemDatabase.flushWorkflowSystemBuffers();
  }
  this.isFlushingBuffers = false;
  }
  async flushWorkflowResultBuffer() {
+ if (this.debugMode) {
+ throw new error_1.DBOSDebuggerError(`Cannot flush workflow result buffer in debug mode.`);
+ }
  const localBuffer = new Map(this.workflowResultBuffer);
  this.workflowResultBuffer.clear();
  const totalSize = localBuffer.size;
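Taken together, the debug-mode changes in this file follow one pattern: code paths that would write to the system database (recording outputs and errors, flushing buffers, enqueueing or recovering workflows) are either skipped or fail fast with a DBOSDebuggerError when debugMode is set. A condensed sketch of that guard pattern (illustrative class, not the SDK's):

    // Condensed sketch of the debug-mode guard pattern applied throughout this file.
    class DebuggerError extends Error {}

    class ExecutorSketch {
      constructor(private readonly debugMode: boolean) {}

      // Writes are silently skipped where replay can proceed without them...
      bufferOutput(write: () => void): void {
        if (!this.debugMode) write();
      }

      // ...and rejected loudly where being reached at all indicates a bug.
      flushBuffers(flush: () => Promise<void>): Promise<void> {
        if (this.debugMode) {
          throw new DebuggerError("Cannot flush result buffer in debug mode.");
        }
        return flush();
      }
    }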