dbos 0.5.0a4__tar.gz → 0.5.0a5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (62)
  1. {dbos-0.5.0a4 → dbos-0.5.0a5}/PKG-INFO +1 -1
  2. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/application_database.py +9 -4
  3. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/core.py +10 -19
  4. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/system_database.py +61 -20
  5. {dbos-0.5.0a4 → dbos-0.5.0a5}/pyproject.toml +1 -1
  6. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/test_concurrency.py +25 -12
  7. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/test_dbos.py +4 -0
  8. {dbos-0.5.0a4 → dbos-0.5.0a5}/LICENSE +0 -0
  9. {dbos-0.5.0a4 → dbos-0.5.0a5}/README.md +0 -0
  10. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/__init__.py +0 -0
  11. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/admin_sever.py +0 -0
  12. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/cli.py +0 -0
  13. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/context.py +0 -0
  14. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/dbos-config.schema.json +0 -0
  15. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/dbos.py +0 -0
  16. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/dbos_config.py +0 -0
  17. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/decorators.py +0 -0
  18. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/error.py +0 -0
  19. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/fastapi.py +0 -0
  20. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/logger.py +0 -0
  21. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/migrations/env.py +0 -0
  22. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/migrations/script.py.mako +0 -0
  23. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  24. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  25. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/py.typed +0 -0
  26. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/recovery.py +0 -0
  27. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/registrations.py +0 -0
  28. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/roles.py +0 -0
  29. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/scheduler/croniter.py +0 -0
  30. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/scheduler/scheduler.py +0 -0
  31. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/schemas/__init__.py +0 -0
  32. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/schemas/application_database.py +0 -0
  33. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/schemas/system_database.py +0 -0
  34. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/templates/hello/README.md +0 -0
  35. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/templates/hello/__package/__init__.py +0 -0
  36. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/templates/hello/__package/main.py +0 -0
  37. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/templates/hello/__package/schema.py +0 -0
  38. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/templates/hello/alembic.ini +0 -0
  39. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/templates/hello/dbos-config.yaml.dbos +0 -0
  40. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/templates/hello/migrations/env.py.dbos +0 -0
  41. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/templates/hello/migrations/script.py.mako +0 -0
  42. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/templates/hello/migrations/versions/2024_07_31_180642_init.py +0 -0
  43. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/templates/hello/start_postgres_docker.py +0 -0
  44. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/tracer.py +0 -0
  45. {dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/utils.py +0 -0
  46. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/__init__.py +0 -0
  47. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/atexit_no_ctor.py +0 -0
  48. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/atexit_no_launch.py +0 -0
  49. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/classdefs.py +0 -0
  50. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/conftest.py +0 -0
  51. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/more_classdefs.py +0 -0
  52. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/scheduler/test_croniter.py +0 -0
  53. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/scheduler/test_scheduler.py +0 -0
  54. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/test_admin_server.py +0 -0
  55. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/test_classdecorators.py +0 -0
  56. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/test_config.py +0 -0
  57. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/test_failures.py +0 -0
  58. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/test_fastapi.py +0 -0
  59. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/test_package.py +0 -0
  60. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/test_schema_migration.py +0 -0
  61. {dbos-0.5.0a4 → dbos-0.5.0a5}/tests/test_singleton.py +0 -0
  62. {dbos-0.5.0a4 → dbos-0.5.0a5}/version/__init__.py +0 -0

{dbos-0.5.0a4 → dbos-0.5.0a5}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 0.5.0a4
+Version: 0.5.0a5
 Summary: A Python framework for backends that scale
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT

{dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/application_database.py
@@ -3,6 +3,7 @@ from typing import Optional, TypedDict, cast
 import sqlalchemy as sa
 import sqlalchemy.dialects.postgresql as pg
 import sqlalchemy.exc as sa_exc
+from sqlalchemy.exc import DBAPIError
 from sqlalchemy.orm import Session, sessionmaker

 from dbos.error import DBOSWorkflowConflictIDError
@@ -95,8 +96,10 @@ class ApplicationDatabase:
                     ),
                 )
             )
-        except sa_exc.IntegrityError:
-            raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
+        except DBAPIError as dbapi_error:
+            if dbapi_error.orig.pgcode == "23505":  # type: ignore
+                raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
+            raise dbapi_error
         except Exception as e:
             raise e

@@ -118,8 +121,10 @@ class ApplicationDatabase:
                     ),
                 )
             )
-        except sa_exc.IntegrityError:
-            raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
+        except DBAPIError as dbapi_error:
+            if dbapi_error.orig.pgcode == "23505":  # type: ignore
+                raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
+            raise dbapi_error
         except Exception as e:
             raise e

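Note on the change above: instead of catching SQLAlchemy's IntegrityError, the code now catches the broader DBAPIError and inspects the Postgres SQLSTATE on the underlying driver exception, re-raising anything that is not a unique-key conflict. A minimal standalone sketch of that pattern (the engine URL, table name, and raised error type are hypothetical, not the package's actual code):

import sqlalchemy as sa
from sqlalchemy.exc import DBAPIError

# Hypothetical connection URL; any Postgres database works for illustration.
engine = sa.create_engine("postgresql+psycopg2://localhost/example")

def record_output(workflow_uuid: str) -> None:
    try:
        with engine.begin() as conn:
            conn.execute(
                sa.text("INSERT INTO transaction_outputs (workflow_uuid) VALUES (:u)"),
                {"u": workflow_uuid},
            )
    except DBAPIError as dbapi_error:
        # psycopg2 exposes the SQLSTATE on the underlying exception; "23505"
        # is unique_violation, which here means a conflicting workflow UUID.
        if getattr(dbapi_error.orig, "pgcode", None) == "23505":
            raise RuntimeError(f"workflow {workflow_uuid} already recorded") from dbapi_error
        raise
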

{dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/core.py
@@ -136,14 +136,18 @@ def _init_workflow(
         "recovery_attempts": None,
     }

+    # If we have a class name, the first arg is the instance and do not serialize
+    if class_name is not None:
+        inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
+
     if temp_wf_type != "transaction":
-        # Don't synchronously record the status or inputs for single transaction workflows
+        # Synchronously record the status and inputs for workflows and single-step workflows
+        # We also have to do this for single-step workflows because of the foreign key constraint on the operation outputs table
         dbos.sys_db.update_workflow_status(status, False, ctx.in_recovery)
-
-        # If we have a class name, the first arg is the instance and do not serialize
-        if class_name is not None:
-            inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
         dbos.sys_db.update_workflow_inputs(wfid, utils.serialize(inputs))
+    else:
+        # Buffer the inputs for single-transaction workflows, but don't buffer the status
+        dbos.sys_db.buffer_workflow_inputs(wfid, utils.serialize(inputs))

     return status

@@ -173,20 +177,6 @@ def _execute_workflow(
         status["error"] = utils.serialize(error)
         dbos.sys_db.update_workflow_status(status)
         raise error
-    finally:
-        if get_temp_workflow_type(func) == "transaction":
-            # Buffer the inputs for single transaction workflows
-            inputs: WorkflowInputs = {
-                "args": args,
-                "kwargs": kwargs,
-            }
-            # If we have a class name, the first arg is the instance and do not serialize
-            class_name = get_dbos_class_name(get_func_info(func), func, args)
-            if class_name is not None:
-                inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
-            dbos.sys_db.buffer_workflow_inputs(
-                status["workflow_uuid"], utils.serialize(inputs)
-            )

     return output

@@ -617,6 +607,7 @@ def _step(
             utils.serialize(error) if error is not None else None
         )
         dbos.sys_db.record_operation_result(step_output)
+
         if error is not None:
             raise error
         return output
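The new comment in _init_workflow about the foreign key constraint reflects that operation outputs reference their workflow's status row, so the status must be written before any output row can be inserted, even for single-step workflows. A schematic illustration of that relationship (simplified table and column names, not the package's actual schema):

import sqlalchemy as sa

metadata = sa.MetaData()

# Parent table: one row per workflow (stand-in for the real workflow status table).
workflow_status = sa.Table(
    "workflow_status",
    metadata,
    sa.Column("workflow_uuid", sa.Text, primary_key=True),
)

# Child table: one row per recorded operation output. The foreign key means an
# insert here fails with SQLSTATE 23503 unless the parent status row exists,
# which is why the status is now recorded synchronously for single-step workflows.
operation_outputs = sa.Table(
    "operation_outputs",
    metadata,
    sa.Column(
        "workflow_uuid",
        sa.Text,
        sa.ForeignKey("workflow_status.workflow_uuid"),
        nullable=False,
    ),
    sa.Column("function_id", sa.Integer, nullable=False),
    sa.Column("output", sa.Text),
)
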

{dbos-0.5.0a4 → dbos-0.5.0a5}/dbos/system_database.py
@@ -4,13 +4,25 @@ import select
 import threading
 import time
 from enum import Enum
-from typing import Any, Dict, List, Literal, Optional, Sequence, TypedDict, cast
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    List,
+    Literal,
+    Optional,
+    Sequence,
+    Set,
+    TypedDict,
+    cast,
+)

 import psycopg2
 import sqlalchemy as sa
 import sqlalchemy.dialects.postgresql as pg
 from alembic import command
 from alembic.config import Config
+from sqlalchemy.exc import DBAPIError

 import dbos.utils as utils
 from dbos.error import (
@@ -199,6 +211,9 @@ class SystemDatabase:
         # Initialize the workflow status and inputs buffers
         self._workflow_status_buffer: Dict[str, WorkflowStatusInternal] = {}
         self._workflow_inputs_buffer: Dict[str, str] = {}
+        # Two sets for tracking which single-transaction workflows have been exported to the status table
+        self._exported_temp_txn_wf_status: Set[str] = set()
+        self._temp_txn_wf_ids: Set[str] = set()
         self._is_flushing_status_buffer = False

         # Now we can run background processes
@@ -264,6 +279,10 @@ class SystemDatabase:
            with self.engine.begin() as c:
                c.execute(cmd)

+        # Record we have exported status for this single-transaction workflow
+        if status["workflow_uuid"] in self._temp_txn_wf_ids:
+            self._exported_temp_txn_wf_status.add(status["workflow_uuid"])
+
     def set_workflow_status(
         self,
         workflow_uuid: str,
@@ -454,6 +473,11 @@ class SystemDatabase:
            with self.engine.begin() as c:
                c.execute(cmd)

+        if workflow_uuid in self._temp_txn_wf_ids:
+            # Clean up the single-transaction tracking sets
+            self._exported_temp_txn_wf_status.discard(workflow_uuid)
+            self._temp_txn_wf_ids.discard(workflow_uuid)
+
     def get_workflow_inputs(self, workflow_uuid: str) -> Optional[WorkflowInputs]:
         with self.engine.begin() as c:
             row = c.execute(
@@ -533,8 +557,10 @@ class SystemDatabase:
             else:
                 with self.engine.begin() as c:
                     c.execute(sql)
-        except sa.exc.IntegrityError:
-            raise DBOSWorkflowConflictIDError(result["workflow_uuid"])
+        except DBAPIError as dbapi_error:
+            if dbapi_error.orig.pgcode == "23505":  # type: ignore
+                raise DBOSWorkflowConflictIDError(result["workflow_uuid"])
+            raise dbapi_error
         except Exception as e:
             raise e

@@ -588,8 +614,11 @@ class SystemDatabase:
                        message=utils.serialize(message),
                    )
                )
-        except sa.exc.IntegrityError:
-            raise DBOSNonExistentWorkflowError(destination_uuid)
+        except DBAPIError as dbapi_error:
+            # Foreign key violation
+            if dbapi_error.orig.pgcode == "23503":  # type: ignore
+                raise DBOSNonExistentWorkflowError(destination_uuid)
+            raise dbapi_error
         except Exception as e:
             raise e
         output: OperationResultInternal = {
@@ -812,8 +841,10 @@ class SystemDatabase:
                        value=utils.serialize(message),
                    )
                )
-        except sa.exc.IntegrityError:
-            raise DBOSDuplicateWorkflowEventError(workflow_uuid, key)
+        except DBAPIError as dbapi_error:
+            if dbapi_error.orig.pgcode == "23505":  # type: ignore
+                raise DBOSDuplicateWorkflowEventError(workflow_uuid, key)
+            raise dbapi_error
         except Exception as e:
             raise e
         output: OperationResultInternal = {
@@ -897,7 +928,7 @@ class SystemDatabase:
         return value

     def _flush_workflow_status_buffer(self) -> None:
-        """Export the workflow status buffer to the database, up to the batch size."""
+        """Export the workflow status buffer to the database, up to the batch size"""
         if len(self._workflow_status_buffer) == 0:
             return

@@ -905,13 +936,16 @@ class SystemDatabase:
         exported_status: Dict[str, WorkflowStatusInternal] = {}
         with self.engine.begin() as c:
             exported = 0
-            local_batch_size = min(
-                buffer_flush_batch_size, len(self._workflow_status_buffer)
-            )
-            while exported < local_batch_size:
+            status_iter = iter(list(self._workflow_status_buffer))
+            wf_id: Optional[str] = None
+            while (
+                exported < buffer_flush_batch_size
+                and (wf_id := next(status_iter, None)) is not None
+            ):
                 # Pop the first key in the buffer (FIFO)
-                wf_id = next(iter(self._workflow_status_buffer))
-                status = self._workflow_status_buffer.pop(wf_id)
+                status = self._workflow_status_buffer.pop(wf_id, None)
+                if status is None:
+                    continue
                 exported_status[wf_id] = status
                 try:
                     self.update_workflow_status(status, conn=c)
@@ -932,12 +966,18 @@ class SystemDatabase:
         exported_inputs: Dict[str, str] = {}
         with self.engine.begin() as c:
             exported = 0
-            local_batch_size = min(
-                buffer_flush_batch_size, len(self._workflow_inputs_buffer)
-            )
-            while exported < local_batch_size:
-                wf_id = next(iter(self._workflow_inputs_buffer))
-                inputs = self._workflow_inputs_buffer.pop(wf_id)
+            input_iter = iter(list(self._workflow_inputs_buffer))
+            wf_id: Optional[str] = None
+            while (
+                exported < buffer_flush_batch_size
+                and (wf_id := next(input_iter, None)) is not None
+            ):
+                if wf_id not in self._exported_temp_txn_wf_status:
+                    # Skip exporting inputs if the status has not been exported yet
+                    continue
+                inputs = self._workflow_inputs_buffer.pop(wf_id, None)
+                if inputs is None:
+                    continue
                 exported_inputs[wf_id] = inputs
                 try:
                     self.update_workflow_inputs(wf_id, inputs, conn=c)
@@ -972,6 +1012,7 @@ class SystemDatabase:
     def buffer_workflow_inputs(self, workflow_id: str, inputs: str) -> None:
         # inputs is a serialized WorkflowInputs string
         self._workflow_inputs_buffer[workflow_id] = inputs
+        self._temp_txn_wf_ids.add(workflow_id)

     @property
     def _is_buffers_empty(self) -> bool:
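The rewritten flush loops above follow a snapshot-and-drain pattern: iterate over a copy of the buffer's keys, pop each entry defensively, and stop once the batch size is reached, so concurrent writers can neither invalidate the iterator nor trigger a KeyError. A standalone sketch of that pattern (the buffer type, batch size, and export callback are invented for illustration):

from typing import Callable, Dict, Iterator, Optional

BATCH_SIZE = 100  # stand-in for buffer_flush_batch_size

def drain_buffer(buffer: Dict[str, str], export: Callable[[str, str], None]) -> None:
    """Export up to BATCH_SIZE entries, tolerating concurrent inserts and removals."""
    key_iter: Iterator[str] = iter(list(buffer))  # snapshot the keys
    exported = 0
    wf_id: Optional[str] = None
    while exported < BATCH_SIZE and (wf_id := next(key_iter, None)) is not None:
        value = buffer.pop(wf_id, None)  # entry may already be gone; pop defensively
        if value is None:
            continue
        export(wf_id, value)
        exported += 1
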

{dbos-0.5.0a4 → dbos-0.5.0a5}/pyproject.toml
@@ -23,7 +23,7 @@ dependencies = [
 ]
 requires-python = ">=3.9"
 readme = "README.md"
-version = "0.5.0a4"
+version = "0.5.0a5"

 [project.license]
 text = "MIT"

{dbos-0.5.0a4 → dbos-0.5.0a5}/tests/test_concurrency.py
@@ -34,18 +34,24 @@ def test_concurrent_conflict_uuid(dbos: DBOS) -> None:
     condition = threading.Condition()
     step_count = 0
     txn_count = 0
+    notified = False

     @DBOS.step()
     def test_step() -> str:
-        nonlocal step_count
+        nonlocal step_count, notified
         step_count += 1
-        condition.acquire()
         if step_count == 1:
             # Wait for the other one to notify
+            condition.acquire()
             condition.wait()
+            notified = True
+            condition.release()
         else:
-            condition.notify()
-            condition.release()
+            while not notified:
+                condition.acquire()
+                condition.notify()
+                condition.release()
+                time.sleep(0.1)
         return DBOS.workflow_id

     @DBOS.workflow()
@@ -61,15 +67,20 @@ def test_concurrent_conflict_uuid(dbos: DBOS) -> None:
     @DBOS.transaction(isolation_level="REPEATABLE READ")
     def test_transaction() -> str:
         DBOS.sql_session.execute(text("SELECT 1")).fetchall()
-        nonlocal txn_count
+        nonlocal txn_count, notified
         txn_count += 1
-        condition.acquire()
         if txn_count == 1:
             # Wait for the other one to notify
+            condition.acquire()
             condition.wait()
+            notified = True
+            condition.release()
         else:
-            condition.notify()
-            condition.release()
+            while not notified:
+                condition.acquire()
+                condition.notify()
+                condition.release()
+                time.sleep(0.1)
         return DBOS.workflow_id

     def test_txn_thread(id: str) -> str:
@@ -89,19 +100,21 @@ def test_concurrent_conflict_uuid(dbos: DBOS) -> None:

     # Make sure temp workflows can handle conflicts as well.
     step_count = 0
+    notified = False
     wfuuid = str(uuid.uuid4())
     with ThreadPoolExecutor(max_workers=2) as executor:
         future1 = executor.submit(test_comm_thread, wfuuid)
         future2 = executor.submit(test_comm_thread, wfuuid)

-        assert future1.result() == wfuuid
-        assert future2.result() == wfuuid
+    assert future1.result() == wfuuid
+    assert future2.result() == wfuuid

     # Make sure temp transactions can handle conflicts as well.
     wfuuid = str(uuid.uuid4())
+    notified = False
     with ThreadPoolExecutor(max_workers=2) as executor:
         future1 = executor.submit(test_txn_thread, wfuuid)
         future2 = executor.submit(test_txn_thread, wfuuid)

-        assert future1.result() == wfuuid
-        assert future2.result() == wfuuid
+    assert future1.result() == wfuuid
+    assert future2.result() == wfuuid
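The test changes replace a bare notify with a handshake: the waiting thread sets a notified flag once it wakes, and the notifying thread keeps signalling until it sees that flag, which closes the race where notify() fires before wait() has been reached. A minimal standalone sketch of that handshake (the thread bodies here are invented for illustration):

import threading
import time

condition = threading.Condition()
notified = False

def waiter() -> None:
    global notified
    with condition:
        condition.wait()   # lock is released while waiting, reacquired on wake-up
        notified = True    # acknowledge the wake-up

def notifier() -> None:
    # Keep notifying until the waiter confirms it woke up.
    while not notified:
        with condition:
            condition.notify()
        time.sleep(0.1)

t1 = threading.Thread(target=waiter)
t2 = threading.Thread(target=notifier)
t1.start()
t2.start()
t1.join()
t2.join()
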

{dbos-0.5.0a4 → dbos-0.5.0a5}/tests/test_dbos.py
@@ -251,6 +251,10 @@ def test_temp_workflow(dbos: DBOS) -> None:
     res = test_step("var")
     assert res == "var"

+    # Flush workflow inputs buffer shouldn't fail due to foreign key violation.
+    # It should properly skip the transaction inputs.
+    dbos.sys_db._flush_workflow_inputs_buffer()
+
     # Wait for buffers to flush
     dbos.sys_db.wait_for_buffer_flush()
     wfs = dbos.sys_db.get_workflows(gwi)