@dbos-inc/dbos-sdk 4.9.3-preview → 4.9.4-preview

This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registry.
Files changed (39)
  1. package/dist/schemas/system_db_schema.d.ts +32 -0
  2. package/dist/schemas/system_db_schema.d.ts.map +1 -1
  3. package/dist/schemas/system_db_schema.js +13 -0
  4. package/dist/schemas/system_db_schema.js.map +1 -1
  5. package/dist/src/client.d.ts +34 -4
  6. package/dist/src/client.d.ts.map +1 -1
  7. package/dist/src/client.js +55 -9
  8. package/dist/src/client.js.map +1 -1
  9. package/dist/src/context.d.ts +2 -0
  10. package/dist/src/context.d.ts.map +1 -1
  11. package/dist/src/context.js.map +1 -1
  12. package/dist/src/dbos-executor.d.ts +3 -3
  13. package/dist/src/dbos-executor.d.ts.map +1 -1
  14. package/dist/src/dbos-executor.js +40 -20
  15. package/dist/src/dbos-executor.js.map +1 -1
  16. package/dist/src/dbos.d.ts +33 -4
  17. package/dist/src/dbos.d.ts.map +1 -1
  18. package/dist/src/dbos.js +24 -12
  19. package/dist/src/dbos.js.map +1 -1
  20. package/dist/src/serialization.d.ts +42 -3
  21. package/dist/src/serialization.d.ts.map +1 -1
  22. package/dist/src/serialization.js +209 -9
  23. package/dist/src/serialization.js.map +1 -1
  24. package/dist/src/sysdb_migrations/internal/migrations.d.ts.map +1 -1
  25. package/dist/src/sysdb_migrations/internal/migrations.js +10 -0
  26. package/dist/src/sysdb_migrations/internal/migrations.js.map +1 -1
  27. package/dist/src/system_database.d.ts +39 -15
  28. package/dist/src/system_database.d.ts.map +1 -1
  29. package/dist/src/system_database.js +90 -67
  30. package/dist/src/system_database.js.map +1 -1
  31. package/dist/src/workflow.d.ts +3 -0
  32. package/dist/src/workflow.d.ts.map +1 -1
  33. package/dist/src/workflow.js +2 -2
  34. package/dist/src/workflow.js.map +1 -1
  35. package/dist/src/workflow_management.d.ts.map +1 -1
  36. package/dist/src/workflow_management.js +7 -5
  37. package/dist/src/workflow_management.js.map +1 -1
  38. package/dist/tsconfig.tsbuildinfo +1 -1
  39. package/package.json +1 -1
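
The change that recurs throughout this diff is a new serialization column threaded through every system table that stores a payload (workflow_status, operation_outputs, notifications, workflow_events, workflow_events_history, streams): each stored value is now written alongside the name of the codec that produced it, and read paths return the raw stored text plus that tag instead of parsing eagerly. As a reading aid, here is a minimal TypeScript sketch of the pattern; only the { serializedValue, serialization } shape and the 'portable_json' tag appear verbatim in the diff below, while StoredPayload and encodePayload are illustrative names, not package APIs.

// Sketch of the payload-plus-codec-tag pattern this diff introduces.
// StoredPayload and encodePayload are hypothetical; only the field shape
// and the 'portable_json' tag come from the diff itself.
interface StoredPayload {
  serializedValue: string | null; // the stored text, e.g. JSON
  serialization: string | null;   // codec tag, e.g. 'portable_json'; null on legacy rows
}

function encodePayload(value: unknown): StoredPayload {
  return { serializedValue: JSON.stringify(value), serialization: 'portable_json' };
}
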
@@ -21,6 +21,7 @@ const database_utils_1 = require("./database_utils");
  const migration_runner_1 = require("./sysdb_migrations/migration_runner");
  const migrations_1 = require("./sysdb_migrations/internal/migrations");
  const debugpoint_1 = require("./debugpoint");
+ const serialization_1 = require("./serialization");
  exports.DBOS_FUNCNAME_SEND = 'DBOS.send';
  exports.DBOS_FUNCNAME_RECV = 'DBOS.recv';
  exports.DBOS_FUNCNAME_SETEVENT = 'DBOS.setEvent';
@@ -172,8 +173,9 @@ async function insertWorkflowStatus(client, initStatus, schemaName, ownerXid, in
  queue_partition_key,
  forked_from,
  parent_workflow_id,
+ serialization,
  owner_xid
- ) VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, $21, $22, $23, $24, $26)
+ ) VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, $21, $22, $23, $24, $26, $27)
  ON CONFLICT (workflow_uuid)
  DO UPDATE SET
  recovery_attempts = CASE
@@ -187,7 +189,7 @@ async function insertWorkflowStatus(client, initStatus, schemaName, ownerXid, in
  THEN EXCLUDED.executor_id
  ELSE workflow_status.executor_id
  END
- RETURNING recovery_attempts, status, name, class_name, config_name, queue_name, workflow_deadline_epoch_ms, executor_id, owner_xid`, [
+ RETURNING recovery_attempts, status, name, class_name, config_name, queue_name, workflow_deadline_epoch_ms, executor_id, owner_xid, serialization`, [
  initStatus.workflowUUID,
  initStatus.status,
  initStatus.workflowName,
@@ -214,6 +216,7 @@ async function insertWorkflowStatus(client, initStatus, schemaName, ownerXid, in
  initStatus.forkedFrom ?? null,
  initStatus.parentWorkflowID ?? null,
  (incrementAttempts ?? false) ? 1 : 0,
+ initStatus.serialization,
  ownerXid,
  ]);
  if (rows.length === 0) {
@@ -222,6 +225,7 @@ async function insertWorkflowStatus(client, initStatus, schemaName, ownerXid, in
  const ret = rows[0];
  ret.class_name = ret.class_name ?? '';
  ret.config_name = ret.config_name ?? '';
+ initStatus.serialization = ret.serialization;
  return ret;
  }
  catch (error) {
@@ -287,8 +291,8 @@ async function updateWorkflowStatus(client, workflowID, status, schemaName, opti
  async function recordOperationResult(client, workflowID, functionID, functionName, checkConflict, schemaName, startTimeEpochMs, endTimeEpochMs, options = {}) {
  try {
  const out = await client.query(`INSERT INTO ${schemaName}.operation_outputs
- (workflow_uuid, function_id, output, error, function_name, child_workflow_id, started_at_epoch_ms, completed_at_epoch_ms)
- VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
+ (workflow_uuid, function_id, output, error, function_name, child_workflow_id, started_at_epoch_ms, completed_at_epoch_ms, serialization)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
  ON CONFLICT DO NOTHING RETURNING completed_at_epoch_ms;`, [
  workflowID,
  functionID,
@@ -298,6 +302,7 @@ async function recordOperationResult(client, workflowID, functionID, functionNam
  options.childWorkflowID ?? null,
  startTimeEpochMs,
  endTimeEpochMs,
+ options.serialization ?? null,
  ]);
  if (checkConflict && (out?.rowCount ?? 0) > 0 && Number(out?.rows?.[0]?.completed_at_epoch_ms) !== endTimeEpochMs) {
  dbos_executor_1.DBOSExecutor.globalInstance?.logger.warn(`Step output for ${workflowID}(${functionID}):${functionName} already recorded`);
@@ -344,6 +349,7 @@ function mapWorkflowStatus(row) {
  startedAtEpochMs: row.started_at_epoch_ms ? Number(row.started_at_epoch_ms) : undefined,
  forkedFrom: row.forked_from ?? undefined,
  parentWorkflowID: row.parent_workflow_id ?? undefined,
+ serialization: row.serialization,
  };
  }
  // SQLSTATE classes/codes that are generally safe to retry
@@ -601,7 +607,7 @@ class PostgresSystemDatabase {
  if (status === workflow_1.StatusString.MAX_RECOVERY_ATTEMPTS_EXCEEDED) {
  throw new error_1.DBOSMaxRecoveryAttemptsExceededError(initStatus.workflowUUID, options?.maxRetries ?? -1);
  }
- return { status, deadlineEpochMS, shouldExecuteOnThisExecutor: false };
+ return { status, deadlineEpochMS, shouldExecuteOnThisExecutor: false, serialization: resRow.serialization };
  }
  // Upsert above already set executor assignment and incremented the recovery attempt
  shouldCommit = true;
@@ -617,7 +623,12 @@
  throw new error_1.DBOSMaxRecoveryAttemptsExceededError(initStatus.workflowUUID, options.maxRetries);
  }
  this.logger.debug(`Workflow ${initStatus.workflowUUID} attempt number: ${attempts}.`);
- return { status, deadlineEpochMS, shouldExecuteOnThisExecutor: true };
+ return {
+ status,
+ deadlineEpochMS,
+ shouldExecuteOnThisExecutor: true,
+ serialization: resRow.serialization,
+ };
  }
  finally {
  try {
@@ -744,33 +755,34 @@ class PostgresSystemDatabase {
  priority: 0,
  queuePartitionKey: undefined,
  forkedFrom: workflowID,
+ serialization: workflowStatus.serialization,
  }, this.schemaName, null);
  if (startStep > 0) {
  // Copy operation outputs
  const copyOutputsQuery = `INSERT INTO "${this.schemaName}".operation_outputs
- (workflow_uuid, function_id, output, error, function_name, child_workflow_id)
- SELECT $1 AS workflow_uuid, function_id, output, error, function_name, child_workflow_id
+ (workflow_uuid, function_id, output, error, serialization, function_name, child_workflow_id)
+ SELECT $1 AS workflow_uuid, function_id, output, error, serialization, function_name, child_workflow_id
  FROM "${this.schemaName}".operation_outputs
  WHERE workflow_uuid = $2 AND function_id < $3`;
  await client.query(copyOutputsQuery, [newWorkflowID, workflowID, startStep]);
  // Copy streams
  const copyStreamsQuery = `INSERT INTO "${this.schemaName}".streams
- (workflow_uuid, key, value, "offset", function_id)
- SELECT $1 AS workflow_uuid, key, value, "offset", function_id
+ (workflow_uuid, key, value, serialization, "offset", function_id)
+ SELECT $1 AS workflow_uuid, key, value, serialization, "offset", function_id
  FROM "${this.schemaName}".streams
  WHERE workflow_uuid = $2 AND function_id < $3`;
  await client.query(copyStreamsQuery, [newWorkflowID, workflowID, startStep]);
  // Copy events history
  const copyEventsHistoryQuery = `INSERT INTO "${this.schemaName}".workflow_events_history
- (workflow_uuid, function_id, key, value)
- SELECT $1 AS workflow_uuid, function_id, key, value
+ (workflow_uuid, function_id, key, value, serialization)
+ SELECT $1 AS workflow_uuid, function_id, key, value, serialization
  FROM "${this.schemaName}".workflow_events_history
  WHERE workflow_uuid = $2 AND function_id < $3`;
  await client.query(copyEventsHistoryQuery, [newWorkflowID, workflowID, startStep]);
  // Copy only the latest version of each event (max function_id per key) into workflow_events
  const copyLatestEventsQuery = `INSERT INTO "${this.schemaName}".workflow_events
- (workflow_uuid, key, value)
- SELECT $1 AS workflow_uuid, weh1.key, weh1.value
+ (workflow_uuid, key, value, serialization)
+ SELECT $1 AS workflow_uuid, weh1.key, weh1.value, serialization
  FROM "${this.schemaName}".workflow_events_history weh1
  WHERE weh1.workflow_uuid = $2
  AND weh1.function_id = (
@@ -846,7 +858,8 @@
  }
  else {
  await recordOperationResult(client, workflowID, functionID, exports.DBOS_FUNCNAME_SLEEP, false, this.schemaName, Date.now(), Date.now(), {
- output: JSON.stringify(endTimeMs),
+ output: serialization_1.DBOSPortableJSON.stringify(endTimeMs),
+ serialization: serialization_1.DBOSPortableJSON.name(),
  });
  }
  return {
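
The hunk above shows the write-side pattern used throughout this version: instead of a bare JSON.stringify, the output is encoded with serialization_1.DBOSPortableJSON.stringify and recorded together with serialization_1.DBOSPortableJSON.name(), so replays know which codec produced the stored text. A sketch of that pairing, assuming only the two methods the diff actually calls (the interface name is hypothetical):

// Assumed minimal codec surface; the diff only exercises stringify() and name().
interface SerializationCodec {
  name(): string;
  stringify(value: unknown): string;
}

// Mirrors the recordOperationResult options above: encoded value plus codec tag.
function encodeForRecord(codec: SerializationCodec, value: unknown) {
  return { output: codec.stringify(value), serialization: codec.name() };
}
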
@@ -859,13 +872,13 @@
  }
  }
  nullTopic = '__null__topic__';
- async send(workflowID, functionID, destinationID, message, topic) {
+ async send(workflowID, functionID, destinationID, message, topic, serialization) {
  topic = topic ?? this.nullTopic;
  const client = await this.pool.connect();
  try {
  await client.query('BEGIN ISOLATION LEVEL READ COMMITTED');
  await this.#runAndRecordResult(client, exports.DBOS_FUNCNAME_SEND, workflowID, functionID, async () => {
- await client.query(`INSERT INTO "${this.schemaName}".notifications (destination_uuid, topic, message) VALUES ($1, $2, $3);`, [destinationID, topic, message]);
+ await client.query(`INSERT INTO "${this.schemaName}".notifications (destination_uuid, topic, message, serialization) VALUES ($1, $2, $3, $4);`, [destinationID, topic, message, serialization]);
  return undefined;
  });
  await client.query('COMMIT');
@@ -894,7 +907,7 @@
  if (res.functionName !== exports.DBOS_FUNCNAME_RECV) {
  throw new error_1.DBOSUnexpectedStepError(workflowID, functionID, exports.DBOS_FUNCNAME_RECV, res.functionName);
  }
- return res.output;
+ return { serializedValue: res.output, serialization: res.serialization ?? null };
  }
  const timeoutms = timeoutSeconds !== undefined ? timeoutSeconds * 1000 : undefined;
  let finishTime = timeoutms !== undefined ? Date.now() + timeoutms : undefined;
@@ -948,6 +961,7 @@
  await this.checkIfCanceled(workflowID);
  // Transactionally consume and return the message if it's in the DB, otherwise return null.
  let message = null;
+ let serialization = null;
  const client = await this.pool.connect();
  try {
  await client.query(`BEGIN ISOLATION LEVEL READ COMMITTED`);
@@ -962,12 +976,14 @@
  ORDER BY created_at_epoch_ms ASC
  LIMIT 1
  )
- RETURNING notifications.message;`, [workflowID, topic])).rows;
+ RETURNING notifications.message, notifications.serialization;`, [workflowID, topic])).rows;
  if (finalRecvRows.length > 0) {
  message = finalRecvRows[0].message;
+ serialization = finalRecvRows[0].serialization;
  }
  await recordOperationResult(client, workflowID, functionID, exports.DBOS_FUNCNAME_RECV, true, this.schemaName, startTime, Date.now(), {
  output: message,
+ serialization,
  });
  await client.query(`COMMIT`);
  }
@@ -979,7 +995,7 @@
  finally {
  client.release();
  }
- return message;
+ return { serializedValue: message, serialization };
  }
  // Only used in tests
  async setWorkflowStatus(workflowID, status, resetRecoveryAttempts, internalOptions) {
@@ -993,21 +1009,21 @@
  client.release();
  }
  }
- async setEvent(workflowID, functionID, key, message) {
+ async setEvent(workflowID, functionID, key, message, serialization) {
  const client = await this.pool.connect();
  try {
  await client.query('BEGIN ISOLATION LEVEL READ COMMITTED');
  await this.#runAndRecordResult(client, exports.DBOS_FUNCNAME_SETEVENT, workflowID, functionID, async () => {
- await client.query(`INSERT INTO "${this.schemaName}".workflow_events (workflow_uuid, key, value)
- VALUES ($1, $2, $3)
+ await client.query(`INSERT INTO "${this.schemaName}".workflow_events (workflow_uuid, key, value, serialization)
+ VALUES ($1, $2, $3, $4)
  ON CONFLICT (workflow_uuid, key)
  DO UPDATE SET value = $3
- RETURNING workflow_uuid;`, [workflowID, key, message]);
+ RETURNING workflow_uuid;`, [workflowID, key, message, serialization]);
  // Also write to the immutable history table for fork support
- await client.query(`INSERT INTO "${this.schemaName}".workflow_events_history (workflow_uuid, function_id, key, value)
- VALUES ($1, $2, $3, $4)
+ await client.query(`INSERT INTO "${this.schemaName}".workflow_events_history (workflow_uuid, function_id, key, value, serialization)
+ VALUES ($1, $2, $3, $4, $5)
  ON CONFLICT (workflow_uuid, function_id, key)
- DO UPDATE SET value = $4;`, [workflowID, functionID, key, message]);
+ DO UPDATE SET value = $4;`, [workflowID, functionID, key, message, serialization]);
  return undefined;
  });
  await client.query('COMMIT');
@@ -1030,11 +1046,12 @@
  if (res.functionName !== exports.DBOS_FUNCNAME_GETEVENT) {
  throw new error_1.DBOSUnexpectedStepError(callerWorkflow.workflowID, callerWorkflow.functionID, exports.DBOS_FUNCNAME_GETEVENT, res.functionName);
  }
- return res.output;
+ return { serializedValue: res.output, serialization: null };
  }
  }
  // Get the return the value. if it's in the DB, otherwise return null.
  let value = null;
+ let valueSer = null;
  const payloadKey = `${workflowID}::${key}`;
  const timeoutms = timeoutSeconds !== undefined ? timeoutSeconds * 1000 : undefined;
  let finishTime = timeoutms !== undefined ? Date.now() + timeoutms : undefined;
@@ -1055,11 +1072,12 @@
  if (callerWorkflow?.workflowID)
  await this.checkIfCanceled(callerWorkflow?.workflowID);
  // Check if the key is already in the DB, then wait for the notification if it isn't.
- const initRecvRows = (await this.pool.query(`SELECT key, value
+ const initRecvRows = (await this.pool.query(`SELECT key, value, serialization
  FROM "${this.schemaName}".workflow_events
  WHERE workflow_uuid=$1 AND key=$2;`, [workflowID, key])).rows;
  if (initRecvRows.length > 0) {
  value = initRecvRows[0].value;
+ valueSer = initRecvRows[0].serialization;
  break;
  }
  const ct = Date.now();
@@ -1096,9 +1114,12 @@
  }
  // Record the output if it is inside a workflow.
  if (callerWorkflow) {
- await this.recordOperationResult(callerWorkflow.workflowID, callerWorkflow.functionID, exports.DBOS_FUNCNAME_GETEVENT, true, startTime, { output: value });
+ await this.recordOperationResult(callerWorkflow.workflowID, callerWorkflow.functionID, exports.DBOS_FUNCNAME_GETEVENT, true, startTime, {
+ output: value,
+ serialization: valueSer,
+ });
  }
- return value;
+ return { serializedValue: value, serialization: valueSer };
  }
  #setWFCancelMap(workflowID) {
  if (this.runningWorkflowMap.has(workflowID)) {
@@ -1261,7 +1282,7 @@
  class_name, config_name, recovery_attempts, queue_name,
  workflow_timeout_ms, workflow_deadline_epoch_ms, started_at_epoch_ms,
  deduplication_id, inputs, priority, queue_partition_key, forked_from,
- parent_workflow_id
+ parent_workflow_id, serialization
  FROM "${this.schemaName}".workflow_status
  WHERE workflow_uuid = $1`, [wfID]);
  if (statusResult.rows.length === 0) {
@@ -1271,19 +1292,20 @@
  // Export operation_outputs
  const outputsResult = await client.query(`SELECT
  workflow_uuid, function_id, function_name, output, error,
- child_workflow_id, started_at_epoch_ms, completed_at_epoch_ms
+ child_workflow_id, started_at_epoch_ms, completed_at_epoch_ms,
+ serialization
  FROM "${this.schemaName}".operation_outputs
  WHERE workflow_uuid = $1`, [wfID]);
  // Export workflow_events
- const eventsResult = await client.query(`SELECT workflow_uuid, key, value
+ const eventsResult = await client.query(`SELECT workflow_uuid, key, value, serialization
  FROM "${this.schemaName}".workflow_events
  WHERE workflow_uuid = $1`, [wfID]);
  // Export workflow_events_history
- const historyResult = await client.query(`SELECT workflow_uuid, function_id, key, value
+ const historyResult = await client.query(`SELECT workflow_uuid, function_id, key, value, serialization
  FROM "${this.schemaName}".workflow_events_history
  WHERE workflow_uuid = $1`, [wfID]);
  // Export streams
- const streamsResult = await client.query(`SELECT workflow_uuid, key, value, "offset", function_id
+ const streamsResult = await client.query(`SELECT workflow_uuid, key, value, "offset", function_id, serialization
  FROM "${this.schemaName}".streams
  WHERE workflow_uuid = $1`, [wfID]);
  exportedWorkflows.push({
@@ -1314,8 +1336,8 @@
  class_name, config_name, recovery_attempts, queue_name,
  workflow_timeout_ms, workflow_deadline_epoch_ms, started_at_epoch_ms,
  deduplication_id, inputs, priority, queue_partition_key, forked_from,
- parent_workflow_id
- ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, $21, $22, $23, $24, $25, $26, $27)`, [
+ parent_workflow_id, serialization
+ ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, $21, $22, $23, $24, $25, $26, $27, $28)`, [
  status.workflow_uuid,
  status.status,
  status.name,
@@ -1343,13 +1365,15 @@
  status.queue_partition_key,
  status.forked_from,
  status.parent_workflow_id,
+ status.serialization,
  ]);
  // Import operation_outputs
  for (const output of workflow.operation_outputs) {
  await client.query(`INSERT INTO "${this.schemaName}".operation_outputs (
  workflow_uuid, function_id, function_name, output, error,
- child_workflow_id, started_at_epoch_ms, completed_at_epoch_ms
- ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)`, [
+ child_workflow_id, started_at_epoch_ms, completed_at_epoch_ms,
+ serialization
+ ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)`, [
  output.workflow_uuid,
  output.function_id,
  output.function_name,
@@ -1358,25 +1382,26 @@
  output.child_workflow_id,
  output.started_at_epoch_ms,
  output.completed_at_epoch_ms,
+ output.serialization,
  ]);
  }
  // Import workflow_events
  for (const event of workflow.workflow_events) {
  await client.query(`INSERT INTO "${this.schemaName}".workflow_events (
- workflow_uuid, key, value
- ) VALUES ($1, $2, $3)`, [event.workflow_uuid, event.key, event.value]);
+ workflow_uuid, key, value, serialization
+ ) VALUES ($1, $2, $3, $4)`, [event.workflow_uuid, event.key, event.value, event.serialization]);
  }
  // Import workflow_events_history
  for (const history of workflow.workflow_events_history) {
  await client.query(`INSERT INTO "${this.schemaName}".workflow_events_history (
- workflow_uuid, function_id, key, value
- ) VALUES ($1, $2, $3, $4)`, [history.workflow_uuid, history.function_id, history.key, history.value]);
+ workflow_uuid, function_id, key, value, serialization
+ ) VALUES ($1, $2, $3, $4, $5)`, [history.workflow_uuid, history.function_id, history.key, history.value, history.serialization]);
  }
  // Import streams
  for (const stream of workflow.streams) {
  await client.query(`INSERT INTO "${this.schemaName}".streams (
- workflow_uuid, key, value, "offset", function_id
- ) VALUES ($1, $2, $3, $4, $5)`, [stream.workflow_uuid, stream.key, stream.value, stream.offset, stream.function_id]);
+ workflow_uuid, key, value, "offset", function_id, serialization
+ ) VALUES ($1, $2, $3, $4, $5, $6)`, [stream.workflow_uuid, stream.key, stream.value, stream.offset, stream.function_id, stream.serialization]);
  }
  }
  await client.query('COMMIT');
@@ -1464,15 +1489,15 @@
  if (callerID)
  await this.checkIfCanceled(callerID);
  try {
- const { rows } = await this.pool.query(`SELECT status, output, error FROM "${this.schemaName}".workflow_status
+ const { rows } = await this.pool.query(`SELECT status, output, error, serialization FROM "${this.schemaName}".workflow_status
  WHERE workflow_uuid=$1`, [workflowID]);
  if (rows.length > 0) {
  const status = rows[0].status;
  if (status === workflow_1.StatusString.SUCCESS) {
- return { output: rows[0].output };
+ return { output: rows[0].output, serialization: rows[0].serialization };
  }
  else if (status === workflow_1.StatusString.ERROR) {
- return { error: rows[0].error };
+ return { error: rows[0].error, serialization: rows[0].serialization };
  }
  else if (status === workflow_1.StatusString.CANCELLED) {
  return { cancelled: true };
@@ -1652,6 +1677,9 @@ class PostgresSystemDatabase {
  if (input.loadOutput) {
  selectColumns.push('output', 'error');
  }
+ if (input.loadInput || input.loadOutput) {
+ selectColumns.push('serialization');
+ }
  input.sortDesc = input.sortDesc ?? false; // By default, sort in ascending order
  // Build WHERE clauses
  const whereClauses = [];
@@ -1871,7 +1899,7 @@
  // Return the IDs of all functions we marked started
  return claimedIDs;
  }
- async writeStreamFromStep(workflowID, functionID, key, value) {
+ async writeStreamFromStep(workflowID, functionID, key, serializedValue, serialization) {
  const client = await this.pool.connect();
  try {
  await client.query('BEGIN ISOLATION LEVEL READ COMMITTED');
@@ -1881,11 +1909,9 @@
  // Next offset is max + 1, or 0 if no records exist
  const maxOffset = maxOffsetResult.rows[0].max;
  const nextOffset = maxOffset !== null ? maxOffset + 1 : 0;
- // Serialize the value before storing
- const serializedValue = JSON.stringify(value);
  // Insert the new stream entry
- await client.query(`INSERT INTO "${this.schemaName}".streams (workflow_uuid, key, value, "offset", function_id)
- VALUES ($1, $2, $3, $4, $5)`, [workflowID, key, serializedValue, nextOffset, functionID]);
+ await client.query(`INSERT INTO "${this.schemaName}".streams (workflow_uuid, key, value, "offset", function_id, serialization)
+ VALUES ($1, $2, $3, $4, $5, $6)`, [workflowID, key, serializedValue, nextOffset, functionID, serialization]);
  await client.query('COMMIT');
  }
  catch (e) {
@@ -1897,11 +1923,10 @@
  client.release();
  }
  }
- async writeStreamFromWorkflow(workflowID, functionID, key, value) {
+ async writeStreamFromWorkflow(workflowID, functionID, key, serializedValue, serialization, functionName) {
  const client = await this.pool.connect();
  try {
  await client.query('BEGIN ISOLATION LEVEL READ COMMITTED');
- const functionName = value === exports.DBOS_STREAM_CLOSED_SENTINEL ? exports.DBOS_FUNCNAME_CLOSESTREAM : exports.DBOS_FUNCNAME_WRITESTREAM;
  await this.#runAndRecordResult(client, functionName, workflowID, functionID, async () => {
  // Find the maximum offset for this workflow_uuid and key combination
  const maxOffsetResult = await client.query(`SELECT MAX("offset") FROM "${this.schemaName}".streams
@@ -1909,11 +1934,9 @@
  // Next offset is max + 1, or 0 if no records exist
  const maxOffset = maxOffsetResult.rows[0].max;
  const nextOffset = maxOffset !== null ? maxOffset + 1 : 0;
- // Serialize the value before storing
- const serializedValue = JSON.stringify(value);
  // Insert the new stream entry
- await client.query(`INSERT INTO "${this.schemaName}".streams (workflow_uuid, key, value, "offset", function_id)
- VALUES ($1, $2, $3, $4, $5)`, [workflowID, key, serializedValue, nextOffset, functionID]);
+ await client.query(`INSERT INTO "${this.schemaName}".streams (workflow_uuid, key, value, "offset", function_id, serialization)
+ VALUES ($1, $2, $3, $4, $5, $6)`, [workflowID, key, serializedValue, nextOffset, functionID, serialization]);
  return undefined;
  });
  await client.query('COMMIT');
@@ -1928,19 +1951,19 @@
  }
  }
  async closeStream(workflowID, functionID, key) {
- await this.writeStreamFromWorkflow(workflowID, functionID, key, exports.DBOS_STREAM_CLOSED_SENTINEL);
+ await this.writeStreamFromWorkflow(workflowID, functionID, key, exports.DBOS_STREAM_CLOSED_SENTINEL, 'portable_json', exports.DBOS_FUNCNAME_CLOSESTREAM);
  }
  async readStream(workflowID, key, offset) {
  const client = await this.pool.connect();
  try {
- const result = await client.query(`SELECT value FROM "${this.schemaName}".streams
+ const result = await client.query(`SELECT value, serialization FROM "${this.schemaName}".streams
  WHERE workflow_uuid = $1 AND key = $2 AND "offset" = $3`, [workflowID, key, offset]);
  if (result.rows.length === 0) {
  throw new Error(`No value found for workflow_uuid=${workflowID}, key=${key}, offset=${offset}`);
  }
  // Deserialize the value before returning
  const row = result.rows[0];
- return JSON.parse(row.value);
+ return { serializedValue: row.value, serialization: row.serialization };
  }
  finally {
  client.release();
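
On the read side, readStream, recv, and getEvent now return { serializedValue, serialization } rather than calling JSON.parse themselves, deferring decoding to the caller. A hedged sketch of what such a caller-side decode might look like; the function is hypothetical, treating 'portable_json' as plain JSON is a simplification (the real codec lives in the expanded serialization.js, which this diff does not show), and null tags are assumed to mark rows written before this version:

// Hypothetical consumer of the { serializedValue, serialization } results.
function decodePayload(serializedValue: string | null, serialization: string | null): unknown {
  if (serializedValue === null) return null;
  if (serialization === null || serialization === 'portable_json') {
    // Simplification: the actual portable_json codec may handle more than JSON.parse does.
    return JSON.parse(serializedValue);
  }
  throw new Error(`Unknown serialization codec: ${serialization}`);
}
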
@@ -2072,7 +2095,7 @@ __decorate([
  __decorate([
  dbRetry(),
  __metadata("design:type", Function),
- __metadata("design:paramtypes", [String, Number, String, Object, String]),
+ __metadata("design:paramtypes", [String, Number, String, Object, Object, Object]),
  __metadata("design:returntype", Promise)
  ], PostgresSystemDatabase.prototype, "send", null);
  __decorate([
@@ -2084,7 +2107,7 @@ __decorate([
  __decorate([
  dbRetry(),
  __metadata("design:type", Function),
- __metadata("design:paramtypes", [String, Number, String, Object]),
+ __metadata("design:paramtypes", [String, Number, String, Object, Object]),
  __metadata("design:returntype", Promise)
  ], PostgresSystemDatabase.prototype, "setEvent", null);
  __decorate([
@@ -2138,13 +2161,13 @@ __decorate([
  __decorate([
  dbRetry(),
  __metadata("design:type", Function),
- __metadata("design:paramtypes", [String, Number, String, Object]),
+ __metadata("design:paramtypes", [String, Number, String, String, Object]),
  __metadata("design:returntype", Promise)
  ], PostgresSystemDatabase.prototype, "writeStreamFromStep", null);
  __decorate([
  dbRetry(),
  __metadata("design:type", Function),
- __metadata("design:paramtypes", [String, Number, String, Object]),
+ __metadata("design:paramtypes", [String, Number, String, String, Object, String]),
  __metadata("design:returntype", Promise)
  ], PostgresSystemDatabase.prototype, "writeStreamFromWorkflow", null);
  __decorate([
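
The design:paramtypes edits in these final hunks are compiler-emitted decorator metadata kept in step with the widened method signatures (the extra serialization and function-name parameters). For reference, a small sketch of how such arrays arise, assuming a TypeScript build with experimentalDecorators and emitDecoratorMetadata enabled and the reflect-metadata polyfill loaded; the decorator and class here are illustrative only:

import 'reflect-metadata';

// With emitDecoratorMetadata on, the compiler records each decorated method's
// parameter types as metadata; untyped or union parameters are emitted as Object.
function logParamTypes(target: object, key: string, _desc: PropertyDescriptor) {
  console.log(key, Reflect.getMetadata('design:paramtypes', target, key));
}

class Example {
  @logParamTypes
  writeStreamFromStep(id: string, fn: number, key: string, value: string, ser: unknown) {}
  // logs: writeStreamFromStep [String, Number, String, String, Object]
}
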