dbos 0.5.0a4__py3-none-any.whl → 0.5.0a5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbos might be problematic. See the registry's advisory page for this release for more details.

@@ -3,6 +3,7 @@ from typing import Optional, TypedDict, cast
3
3
  import sqlalchemy as sa
4
4
  import sqlalchemy.dialects.postgresql as pg
5
5
  import sqlalchemy.exc as sa_exc
6
+ from sqlalchemy.exc import DBAPIError
6
7
  from sqlalchemy.orm import Session, sessionmaker
7
8
 
8
9
  from dbos.error import DBOSWorkflowConflictIDError
@@ -95,8 +96,10 @@ class ApplicationDatabase:
95
96
  ),
96
97
  )
97
98
  )
98
- except sa_exc.IntegrityError:
99
- raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
99
+ except DBAPIError as dbapi_error:
100
+ if dbapi_error.orig.pgcode == "23505": # type: ignore
101
+ raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
102
+ raise dbapi_error
100
103
  except Exception as e:
101
104
  raise e
102
105
 
@@ -118,8 +121,10 @@ class ApplicationDatabase:
118
121
  ),
119
122
  )
120
123
  )
121
- except sa_exc.IntegrityError:
122
- raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
124
+ except DBAPIError as dbapi_error:
125
+ if dbapi_error.orig.pgcode == "23505": # type: ignore
126
+ raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
127
+ raise dbapi_error
123
128
  except Exception as e:
124
129
  raise e
125
130
 
dbos/core.py CHANGED
@@ -136,14 +136,18 @@ def _init_workflow(
136
136
  "recovery_attempts": None,
137
137
  }
138
138
 
139
+ # If we have a class name, the first arg is the instance and do not serialize
140
+ if class_name is not None:
141
+ inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
142
+
139
143
  if temp_wf_type != "transaction":
140
- # Don't synchronously record the status or inputs for single transaction workflows
144
+ # Synchronously record the status and inputs for workflows and single-step workflows
145
+ # We also have to do this for single-step workflows because of the foreign key constraint on the operation outputs table
141
146
  dbos.sys_db.update_workflow_status(status, False, ctx.in_recovery)
142
-
143
- # If we have a class name, the first arg is the instance and do not serialize
144
- if class_name is not None:
145
- inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
146
147
  dbos.sys_db.update_workflow_inputs(wfid, utils.serialize(inputs))
148
+ else:
149
+ # Buffer the inputs for single-transaction workflows, but don't buffer the status
150
+ dbos.sys_db.buffer_workflow_inputs(wfid, utils.serialize(inputs))
147
151
 
148
152
  return status
149
153
 
@@ -173,20 +177,6 @@ def _execute_workflow(
173
177
  status["error"] = utils.serialize(error)
174
178
  dbos.sys_db.update_workflow_status(status)
175
179
  raise error
176
- finally:
177
- if get_temp_workflow_type(func) == "transaction":
178
- # Buffer the inputs for single transaction workflows
179
- inputs: WorkflowInputs = {
180
- "args": args,
181
- "kwargs": kwargs,
182
- }
183
- # If we have a class name, the first arg is the instance and do not serialize
184
- class_name = get_dbos_class_name(get_func_info(func), func, args)
185
- if class_name is not None:
186
- inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
187
- dbos.sys_db.buffer_workflow_inputs(
188
- status["workflow_uuid"], utils.serialize(inputs)
189
- )
190
180
 
191
181
  return output
192
182
 
@@ -617,6 +607,7 @@ def _step(
617
607
  utils.serialize(error) if error is not None else None
618
608
  )
619
609
  dbos.sys_db.record_operation_result(step_output)
610
+
620
611
  if error is not None:
621
612
  raise error
622
613
  return output
dbos/system_database.py CHANGED
@@ -4,13 +4,25 @@ import select
4
4
  import threading
5
5
  import time
6
6
  from enum import Enum
7
- from typing import Any, Dict, List, Literal, Optional, Sequence, TypedDict, cast
7
+ from typing import (
8
+ TYPE_CHECKING,
9
+ Any,
10
+ Dict,
11
+ List,
12
+ Literal,
13
+ Optional,
14
+ Sequence,
15
+ Set,
16
+ TypedDict,
17
+ cast,
18
+ )
8
19
 
9
20
  import psycopg2
10
21
  import sqlalchemy as sa
11
22
  import sqlalchemy.dialects.postgresql as pg
12
23
  from alembic import command
13
24
  from alembic.config import Config
25
+ from sqlalchemy.exc import DBAPIError
14
26
 
15
27
  import dbos.utils as utils
16
28
  from dbos.error import (
@@ -199,6 +211,9 @@ class SystemDatabase:
199
211
  # Initialize the workflow status and inputs buffers
200
212
  self._workflow_status_buffer: Dict[str, WorkflowStatusInternal] = {}
201
213
  self._workflow_inputs_buffer: Dict[str, str] = {}
214
+ # Two sets for tracking which single-transaction workflows have been exported to the status table
215
+ self._exported_temp_txn_wf_status: Set[str] = set()
216
+ self._temp_txn_wf_ids: Set[str] = set()
202
217
  self._is_flushing_status_buffer = False
203
218
 
204
219
  # Now we can run background processes
@@ -264,6 +279,10 @@ class SystemDatabase:
264
279
  with self.engine.begin() as c:
265
280
  c.execute(cmd)
266
281
 
282
+ # Record we have exported status for this single-transaction workflow
283
+ if status["workflow_uuid"] in self._temp_txn_wf_ids:
284
+ self._exported_temp_txn_wf_status.add(status["workflow_uuid"])
285
+
267
286
  def set_workflow_status(
268
287
  self,
269
288
  workflow_uuid: str,
@@ -454,6 +473,11 @@ class SystemDatabase:
454
473
  with self.engine.begin() as c:
455
474
  c.execute(cmd)
456
475
 
476
+ if workflow_uuid in self._temp_txn_wf_ids:
477
+ # Clean up the single-transaction tracking sets
478
+ self._exported_temp_txn_wf_status.discard(workflow_uuid)
479
+ self._temp_txn_wf_ids.discard(workflow_uuid)
480
+
457
481
  def get_workflow_inputs(self, workflow_uuid: str) -> Optional[WorkflowInputs]:
458
482
  with self.engine.begin() as c:
459
483
  row = c.execute(
@@ -533,8 +557,10 @@ class SystemDatabase:
533
557
  else:
534
558
  with self.engine.begin() as c:
535
559
  c.execute(sql)
536
- except sa.exc.IntegrityError:
537
- raise DBOSWorkflowConflictIDError(result["workflow_uuid"])
560
+ except DBAPIError as dbapi_error:
561
+ if dbapi_error.orig.pgcode == "23505": # type: ignore
562
+ raise DBOSWorkflowConflictIDError(result["workflow_uuid"])
563
+ raise dbapi_error
538
564
  except Exception as e:
539
565
  raise e
540
566
 
@@ -588,8 +614,11 @@ class SystemDatabase:
588
614
  message=utils.serialize(message),
589
615
  )
590
616
  )
591
- except sa.exc.IntegrityError:
592
- raise DBOSNonExistentWorkflowError(destination_uuid)
617
+ except DBAPIError as dbapi_error:
618
+ # Foreign key violation
619
+ if dbapi_error.orig.pgcode == "23503": # type: ignore
620
+ raise DBOSNonExistentWorkflowError(destination_uuid)
621
+ raise dbapi_error
593
622
  except Exception as e:
594
623
  raise e
595
624
  output: OperationResultInternal = {
@@ -812,8 +841,10 @@ class SystemDatabase:
812
841
  value=utils.serialize(message),
813
842
  )
814
843
  )
815
- except sa.exc.IntegrityError:
816
- raise DBOSDuplicateWorkflowEventError(workflow_uuid, key)
844
+ except DBAPIError as dbapi_error:
845
+ if dbapi_error.orig.pgcode == "23505": # type: ignore
846
+ raise DBOSDuplicateWorkflowEventError(workflow_uuid, key)
847
+ raise dbapi_error
817
848
  except Exception as e:
818
849
  raise e
819
850
  output: OperationResultInternal = {
@@ -897,7 +928,7 @@ class SystemDatabase:
897
928
  return value
898
929
 
899
930
  def _flush_workflow_status_buffer(self) -> None:
900
- """Export the workflow status buffer to the database, up to the batch size."""
931
+ """Export the workflow status buffer to the database, up to the batch size"""
901
932
  if len(self._workflow_status_buffer) == 0:
902
933
  return
903
934
 
@@ -905,13 +936,16 @@ class SystemDatabase:
905
936
  exported_status: Dict[str, WorkflowStatusInternal] = {}
906
937
  with self.engine.begin() as c:
907
938
  exported = 0
908
- local_batch_size = min(
909
- buffer_flush_batch_size, len(self._workflow_status_buffer)
910
- )
911
- while exported < local_batch_size:
939
+ status_iter = iter(list(self._workflow_status_buffer))
940
+ wf_id: Optional[str] = None
941
+ while (
942
+ exported < buffer_flush_batch_size
943
+ and (wf_id := next(status_iter, None)) is not None
944
+ ):
912
945
  # Pop the first key in the buffer (FIFO)
913
- wf_id = next(iter(self._workflow_status_buffer))
914
- status = self._workflow_status_buffer.pop(wf_id)
946
+ status = self._workflow_status_buffer.pop(wf_id, None)
947
+ if status is None:
948
+ continue
915
949
  exported_status[wf_id] = status
916
950
  try:
917
951
  self.update_workflow_status(status, conn=c)
@@ -932,12 +966,18 @@ class SystemDatabase:
932
966
  exported_inputs: Dict[str, str] = {}
933
967
  with self.engine.begin() as c:
934
968
  exported = 0
935
- local_batch_size = min(
936
- buffer_flush_batch_size, len(self._workflow_inputs_buffer)
937
- )
938
- while exported < local_batch_size:
939
- wf_id = next(iter(self._workflow_inputs_buffer))
940
- inputs = self._workflow_inputs_buffer.pop(wf_id)
969
+ input_iter = iter(list(self._workflow_inputs_buffer))
970
+ wf_id: Optional[str] = None
971
+ while (
972
+ exported < buffer_flush_batch_size
973
+ and (wf_id := next(input_iter, None)) is not None
974
+ ):
975
+ if wf_id not in self._exported_temp_txn_wf_status:
976
+ # Skip exporting inputs if the status has not been exported yet
977
+ continue
978
+ inputs = self._workflow_inputs_buffer.pop(wf_id, None)
979
+ if inputs is None:
980
+ continue
941
981
  exported_inputs[wf_id] = inputs
942
982
  try:
943
983
  self.update_workflow_inputs(wf_id, inputs, conn=c)
@@ -972,6 +1012,7 @@ class SystemDatabase:
972
1012
  def buffer_workflow_inputs(self, workflow_id: str, inputs: str) -> None:
973
1013
  # inputs is a serialized WorkflowInputs string
974
1014
  self._workflow_inputs_buffer[workflow_id] = inputs
1015
+ self._temp_txn_wf_ids.add(workflow_id)
975
1016
 
976
1017
  @property
977
1018
  def _is_buffers_empty(self) -> bool:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dbos
3
- Version: 0.5.0a4
3
+ Version: 0.5.0a5
4
4
  Summary: A Python framework for backends that scale
5
5
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
6
6
  License: MIT
@@ -1,13 +1,13 @@
1
- dbos-0.5.0a4.dist-info/METADATA,sha256=l_v0uEPtP7uqtxzLfs6FCFNe3jLyhnG1of_HACqUUyo,5420
2
- dbos-0.5.0a4.dist-info/WHEEL,sha256=rSwsxJWe3vzyR5HCwjWXQruDgschpei4h_giTm0dJVE,90
3
- dbos-0.5.0a4.dist-info/entry_points.txt,sha256=3PmOPbM4FYxEmggRRdJw0oAsiBzKR8U0yx7bmwUmMOM,39
4
- dbos-0.5.0a4.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
1
+ dbos-0.5.0a5.dist-info/METADATA,sha256=WRKFIucNawLOnizlrKNBp1cTcYW2BcuIcaYaQjXq1FU,5420
2
+ dbos-0.5.0a5.dist-info/WHEEL,sha256=rSwsxJWe3vzyR5HCwjWXQruDgschpei4h_giTm0dJVE,90
3
+ dbos-0.5.0a5.dist-info/entry_points.txt,sha256=3PmOPbM4FYxEmggRRdJw0oAsiBzKR8U0yx7bmwUmMOM,39
4
+ dbos-0.5.0a5.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
5
5
  dbos/__init__.py,sha256=X1LdP36NomDtvPfFwoMNtgXf81TO05jj7vltsp79UUw,787
6
6
  dbos/admin_sever.py,sha256=KtzH6aKyskCm4h3yulpy9jb5PIqRlYI2sjctw5mvaKY,3395
7
- dbos/application_database.py,sha256=cBSfyB-5KGFBtigLpFPWEAgmrKONAUnG1825vXPifBk,5411
7
+ dbos/application_database.py,sha256=1K3kE96BgGi_QWOd2heXluyNTwFAwlUVuAR6JKKUqf0,5659
8
8
  dbos/cli.py,sha256=QnbGtZ8S963q3iyFvXNBcL4DB35r4SFMarlb5DRqN6M,7915
9
9
  dbos/context.py,sha256=JZMV2RtSpTK7lnyyWxeBmGPwrZSB00XZEP6R6MT9ygQ,15690
10
- dbos/core.py,sha256=8KkmHYQtxbtfFI_2sAf4DYmiznwYxLLhE6z-FLc3Gho,28675
10
+ dbos/core.py,sha256=HfKnPpIaQqIBAHzP2hD67aSIchTHp87NgD21CcujKkE,28300
11
11
  dbos/dbos-config.schema.json,sha256=azpfmoDZg7WfSy3kvIsk9iEiKB_-VZt03VEOoXJAkqE,5331
12
12
  dbos/dbos.py,sha256=HngS2BUWSbWPmloXGr-KE81BQ8dpZtlvOXM4tx4_Qhg,26246
13
13
  dbos/dbos_config.py,sha256=EkO0c0xaIM7_vAAqqnvNNEAKG5fOJbmmalqnZvaKYZA,5312
@@ -28,7 +28,7 @@ dbos/scheduler/scheduler.py,sha256=uO4_9jmWW2rLv1ODL3lc1cE_37ZaVTgnvmFx_FAlN50,1
28
28
  dbos/schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
29
29
  dbos/schemas/application_database.py,sha256=q_Wr2XbiZNBYFkOtu7uKavo1T_cSOBblxKGHThYGGsY,962
30
30
  dbos/schemas/system_database.py,sha256=5V3vqnEzry0Hn7ZbVS9Gs_dJKia8uX8p7mGC82Ru8rk,4303
31
- dbos/system_database.py,sha256=h04PngaTdHxr1zfXcH6rSdac_vX0mSsD4SWHyOEWpJQ,38147
31
+ dbos/system_database.py,sha256=SK24Avj10rbbWFilVUexdPX6VvOL8zC-CoWDhNQj6QM,39698
32
32
  dbos/templates/hello/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
33
33
  dbos/templates/hello/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
34
34
  dbos/templates/hello/__package/main.py,sha256=hJgp3S14cseT7zWIZsPwjqdzwTCw1aLo8kPKsTvYz0Y,2976
@@ -42,4 +42,4 @@ dbos/templates/hello/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKs
42
42
  dbos/tracer.py,sha256=RPW9oxmX9tSc0Yq7O-FAhpQWBg1QT7Ni1Q06uwhtNDk,2237
43
43
  dbos/utils.py,sha256=hWj9iWDrby2cVEhb0pG-IdnrxLqP64NhkaWUXiLc8bA,402
44
44
  version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
45
- dbos-0.5.0a4.dist-info/RECORD,,
45
+ dbos-0.5.0a5.dist-info/RECORD,,
File without changes