dbos 0.5.0a4__py3-none-any.whl → 0.5.0a7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbos might be problematic; consult the release's advisory details in the package registry for more information.

@@ -3,6 +3,7 @@ from typing import Optional, TypedDict, cast
3
3
  import sqlalchemy as sa
4
4
  import sqlalchemy.dialects.postgresql as pg
5
5
  import sqlalchemy.exc as sa_exc
6
+ from sqlalchemy.exc import DBAPIError
6
7
  from sqlalchemy.orm import Session, sessionmaker
7
8
 
8
9
  from dbos.error import DBOSWorkflowConflictIDError
@@ -95,8 +96,10 @@ class ApplicationDatabase:
95
96
  ),
96
97
  )
97
98
  )
98
- except sa_exc.IntegrityError:
99
- raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
99
+ except DBAPIError as dbapi_error:
100
+ if dbapi_error.orig.pgcode == "23505": # type: ignore
101
+ raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
102
+ raise dbapi_error
100
103
  except Exception as e:
101
104
  raise e
102
105
 
@@ -118,8 +121,10 @@ class ApplicationDatabase:
118
121
  ),
119
122
  )
120
123
  )
121
- except sa_exc.IntegrityError:
122
- raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
124
+ except DBAPIError as dbapi_error:
125
+ if dbapi_error.orig.pgcode == "23505": # type: ignore
126
+ raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
127
+ raise dbapi_error
123
128
  except Exception as e:
124
129
  raise e
125
130
 
dbos/core.py CHANGED
@@ -136,14 +136,18 @@ def _init_workflow(
136
136
  "recovery_attempts": None,
137
137
  }
138
138
 
139
+ # If we have a class name, the first arg is the instance and do not serialize
140
+ if class_name is not None:
141
+ inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
142
+
139
143
  if temp_wf_type != "transaction":
140
- # Don't synchronously record the status or inputs for single transaction workflows
144
+ # Synchronously record the status and inputs for workflows and single-step workflows
145
+ # We also have to do this for single-step workflows because of the foreign key constraint on the operation outputs table
141
146
  dbos.sys_db.update_workflow_status(status, False, ctx.in_recovery)
142
-
143
- # If we have a class name, the first arg is the instance and do not serialize
144
- if class_name is not None:
145
- inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
146
147
  dbos.sys_db.update_workflow_inputs(wfid, utils.serialize(inputs))
148
+ else:
149
+ # Buffer the inputs for single-transaction workflows, but don't buffer the status
150
+ dbos.sys_db.buffer_workflow_inputs(wfid, utils.serialize(inputs))
147
151
 
148
152
  return status
149
153
 
@@ -173,20 +177,6 @@ def _execute_workflow(
173
177
  status["error"] = utils.serialize(error)
174
178
  dbos.sys_db.update_workflow_status(status)
175
179
  raise error
176
- finally:
177
- if get_temp_workflow_type(func) == "transaction":
178
- # Buffer the inputs for single transaction workflows
179
- inputs: WorkflowInputs = {
180
- "args": args,
181
- "kwargs": kwargs,
182
- }
183
- # If we have a class name, the first arg is the instance and do not serialize
184
- class_name = get_dbos_class_name(get_func_info(func), func, args)
185
- if class_name is not None:
186
- inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
187
- dbos.sys_db.buffer_workflow_inputs(
188
- status["workflow_uuid"], utils.serialize(inputs)
189
- )
190
180
 
191
181
  return output
192
182
 
@@ -617,6 +607,7 @@ def _step(
617
607
  utils.serialize(error) if error is not None else None
618
608
  )
619
609
  dbos.sys_db.record_operation_result(step_output)
610
+
620
611
  if error is not None:
621
612
  raise error
622
613
  return output
dbos/dbos.py CHANGED
@@ -72,7 +72,7 @@ from dbos.error import DBOSException, DBOSNonExistentWorkflowError
72
72
 
73
73
  from .application_database import ApplicationDatabase
74
74
  from .dbos_config import ConfigFile, load_config, set_env_vars
75
- from .logger import config_logger, dbos_logger, init_logger
75
+ from .logger import add_otlp_to_all_loggers, config_logger, dbos_logger, init_logger
76
76
  from .system_database import SystemDatabase
77
77
 
78
78
  # Most DBOS functions are just any callable F, so decorators / wrappers work on F
@@ -204,7 +204,6 @@ class DBOS:
204
204
  cls: Type[DBOS],
205
205
  fastapi: Optional["FastAPI"] = None,
206
206
  config: Optional[ConfigFile] = None,
207
- launch: bool = True,
208
207
  ) -> DBOS:
209
208
  global _dbos_global_instance
210
209
  global _dbos_global_registry
@@ -219,7 +218,7 @@ class DBOS:
219
218
  )
220
219
  config = _dbos_global_registry.config
221
220
  _dbos_global_instance = super().__new__(cls)
222
- _dbos_global_instance.__init__(fastapi=fastapi, config=config, launch=launch) # type: ignore
221
+ _dbos_global_instance.__init__(fastapi=fastapi, config=config) # type: ignore
223
222
  else:
224
223
  if (config is not None and _dbos_global_instance.config is not config) or (
225
224
  _dbos_global_instance.fastapi is not fastapi
@@ -242,7 +241,6 @@ class DBOS:
242
241
  self,
243
242
  fastapi: Optional["FastAPI"] = None,
244
243
  config: Optional[ConfigFile] = None,
245
- launch: bool = True,
246
244
  ) -> None:
247
245
  if hasattr(self, "_initialized") and self._initialized:
248
246
  return
@@ -270,7 +268,6 @@ class DBOS:
270
268
 
271
269
  setup_fastapi_middleware(self.fastapi)
272
270
  self.fastapi.on_event("startup")(self.launch)
273
- launch = False
274
271
 
275
272
  # Register send_stub as a workflow
276
273
  def send_temp_workflow(
@@ -283,8 +280,8 @@ class DBOS:
283
280
  set_temp_workflow_type(send_temp_workflow, "send")
284
281
  self._registry.register_wf_function(TEMP_SEND_WF_NAME, temp_send_wf)
285
282
 
286
- if launch:
287
- self.launch()
283
+ for handler in dbos_logger.handlers:
284
+ handler.flush()
288
285
 
289
286
  @property
290
287
  def executor(self) -> ThreadPoolExecutor:
@@ -342,9 +339,13 @@ class DBOS:
342
339
  self.executor.submit(func, *args, **kwargs)
343
340
  self._registry.pollers = []
344
341
 
345
- dbos_logger.info("DBOS initialized")
342
+ dbos_logger.info("DBOS launched")
343
+
344
+ # Flush handlers and add OTLP to all loggers if enabled
345
+ # to enable their export in DBOS Cloud
346
346
  for handler in dbos_logger.handlers:
347
347
  handler.flush()
348
+ add_otlp_to_all_loggers()
348
349
 
349
350
  def _destroy(self) -> None:
350
351
  self._initialized = False
dbos/logger.py CHANGED
@@ -12,6 +12,7 @@ if TYPE_CHECKING:
12
12
  from dbos.dbos_config import ConfigFile
13
13
 
14
14
  dbos_logger = logging.getLogger("dbos")
15
+ otlp_handler, otlp_transformer = None, None
15
16
 
16
17
 
17
18
  class DBOSLogTransformer(logging.Filter):
@@ -73,25 +74,28 @@ def config_logger(config: "ConfigFile") -> None:
73
74
  export_timeout_millis=5000,
74
75
  )
75
76
  )
77
+ global otlp_handler
76
78
  otlp_handler = LoggingHandler(logger_provider=log_provider)
77
79
 
78
80
  # Attach DBOS-specific attributes to all log entries.
81
+ global otlp_transformer
79
82
  otlp_transformer = DBOSLogTransformer()
80
83
 
81
- # Direct all logs to OTLP
82
- add_otlp_to_all_loggers(otlp_handler, otlp_transformer)
84
+ # Direct DBOS logs to OTLP
85
+ dbos_logger.addHandler(otlp_handler)
86
+ dbos_logger.addFilter(otlp_transformer)
83
87
 
84
88
 
85
- def add_otlp_to_all_loggers(
86
- otlp_handler: LoggingHandler, otlp_transformer: DBOSLogTransformer
87
- ) -> None:
88
- root = logging.root
89
+ def add_otlp_to_all_loggers() -> None:
90
+ if otlp_handler is not None and otlp_transformer is not None:
91
+ root = logging.root
89
92
 
90
- root.addHandler(otlp_handler)
91
- root.addFilter(otlp_transformer)
93
+ root.addHandler(otlp_handler)
94
+ root.addFilter(otlp_transformer)
92
95
 
93
- for logger_name in root.manager.loggerDict:
94
- logger = logging.getLogger(logger_name)
95
- if not logger.propagate:
96
- logger.addHandler(otlp_handler)
97
- logger.addFilter(otlp_transformer)
96
+ for logger_name in root.manager.loggerDict:
97
+ if logger_name != dbos_logger.name:
98
+ logger = logging.getLogger(logger_name)
99
+ if not logger.propagate:
100
+ logger.addHandler(otlp_handler)
101
+ logger.addFilter(otlp_transformer)
dbos/system_database.py CHANGED
@@ -4,13 +4,25 @@ import select
4
4
  import threading
5
5
  import time
6
6
  from enum import Enum
7
- from typing import Any, Dict, List, Literal, Optional, Sequence, TypedDict, cast
7
+ from typing import (
8
+ TYPE_CHECKING,
9
+ Any,
10
+ Dict,
11
+ List,
12
+ Literal,
13
+ Optional,
14
+ Sequence,
15
+ Set,
16
+ TypedDict,
17
+ cast,
18
+ )
8
19
 
9
20
  import psycopg2
10
21
  import sqlalchemy as sa
11
22
  import sqlalchemy.dialects.postgresql as pg
12
23
  from alembic import command
13
24
  from alembic.config import Config
25
+ from sqlalchemy.exc import DBAPIError
14
26
 
15
27
  import dbos.utils as utils
16
28
  from dbos.error import (
@@ -199,6 +211,9 @@ class SystemDatabase:
199
211
  # Initialize the workflow status and inputs buffers
200
212
  self._workflow_status_buffer: Dict[str, WorkflowStatusInternal] = {}
201
213
  self._workflow_inputs_buffer: Dict[str, str] = {}
214
+ # Two sets for tracking which single-transaction workflows have been exported to the status table
215
+ self._exported_temp_txn_wf_status: Set[str] = set()
216
+ self._temp_txn_wf_ids: Set[str] = set()
202
217
  self._is_flushing_status_buffer = False
203
218
 
204
219
  # Now we can run background processes
@@ -264,6 +279,10 @@ class SystemDatabase:
264
279
  with self.engine.begin() as c:
265
280
  c.execute(cmd)
266
281
 
282
+ # Record we have exported status for this single-transaction workflow
283
+ if status["workflow_uuid"] in self._temp_txn_wf_ids:
284
+ self._exported_temp_txn_wf_status.add(status["workflow_uuid"])
285
+
267
286
  def set_workflow_status(
268
287
  self,
269
288
  workflow_uuid: str,
@@ -454,6 +473,11 @@ class SystemDatabase:
454
473
  with self.engine.begin() as c:
455
474
  c.execute(cmd)
456
475
 
476
+ if workflow_uuid in self._temp_txn_wf_ids:
477
+ # Clean up the single-transaction tracking sets
478
+ self._exported_temp_txn_wf_status.discard(workflow_uuid)
479
+ self._temp_txn_wf_ids.discard(workflow_uuid)
480
+
457
481
  def get_workflow_inputs(self, workflow_uuid: str) -> Optional[WorkflowInputs]:
458
482
  with self.engine.begin() as c:
459
483
  row = c.execute(
@@ -533,8 +557,10 @@ class SystemDatabase:
533
557
  else:
534
558
  with self.engine.begin() as c:
535
559
  c.execute(sql)
536
- except sa.exc.IntegrityError:
537
- raise DBOSWorkflowConflictIDError(result["workflow_uuid"])
560
+ except DBAPIError as dbapi_error:
561
+ if dbapi_error.orig.pgcode == "23505": # type: ignore
562
+ raise DBOSWorkflowConflictIDError(result["workflow_uuid"])
563
+ raise dbapi_error
538
564
  except Exception as e:
539
565
  raise e
540
566
 
@@ -588,8 +614,11 @@ class SystemDatabase:
588
614
  message=utils.serialize(message),
589
615
  )
590
616
  )
591
- except sa.exc.IntegrityError:
592
- raise DBOSNonExistentWorkflowError(destination_uuid)
617
+ except DBAPIError as dbapi_error:
618
+ # Foreign key violation
619
+ if dbapi_error.orig.pgcode == "23503": # type: ignore
620
+ raise DBOSNonExistentWorkflowError(destination_uuid)
621
+ raise dbapi_error
593
622
  except Exception as e:
594
623
  raise e
595
624
  output: OperationResultInternal = {
@@ -705,7 +734,6 @@ class SystemDatabase:
705
734
  )
706
735
  notification_cursor = self.notification_conn.cursor()
707
736
 
708
- dbos_logger.info("Listening to notifications")
709
737
  notification_cursor.execute("LISTEN dbos_notifications_channel")
710
738
  notification_cursor.execute("LISTEN dbos_workflow_events_channel")
711
739
  while self._run_background_processes:
@@ -812,8 +840,10 @@ class SystemDatabase:
812
840
  value=utils.serialize(message),
813
841
  )
814
842
  )
815
- except sa.exc.IntegrityError:
816
- raise DBOSDuplicateWorkflowEventError(workflow_uuid, key)
843
+ except DBAPIError as dbapi_error:
844
+ if dbapi_error.orig.pgcode == "23505": # type: ignore
845
+ raise DBOSDuplicateWorkflowEventError(workflow_uuid, key)
846
+ raise dbapi_error
817
847
  except Exception as e:
818
848
  raise e
819
849
  output: OperationResultInternal = {
@@ -897,7 +927,7 @@ class SystemDatabase:
897
927
  return value
898
928
 
899
929
  def _flush_workflow_status_buffer(self) -> None:
900
- """Export the workflow status buffer to the database, up to the batch size."""
930
+ """Export the workflow status buffer to the database, up to the batch size"""
901
931
  if len(self._workflow_status_buffer) == 0:
902
932
  return
903
933
 
@@ -905,13 +935,16 @@ class SystemDatabase:
905
935
  exported_status: Dict[str, WorkflowStatusInternal] = {}
906
936
  with self.engine.begin() as c:
907
937
  exported = 0
908
- local_batch_size = min(
909
- buffer_flush_batch_size, len(self._workflow_status_buffer)
910
- )
911
- while exported < local_batch_size:
938
+ status_iter = iter(list(self._workflow_status_buffer))
939
+ wf_id: Optional[str] = None
940
+ while (
941
+ exported < buffer_flush_batch_size
942
+ and (wf_id := next(status_iter, None)) is not None
943
+ ):
912
944
  # Pop the first key in the buffer (FIFO)
913
- wf_id = next(iter(self._workflow_status_buffer))
914
- status = self._workflow_status_buffer.pop(wf_id)
945
+ status = self._workflow_status_buffer.pop(wf_id, None)
946
+ if status is None:
947
+ continue
915
948
  exported_status[wf_id] = status
916
949
  try:
917
950
  self.update_workflow_status(status, conn=c)
@@ -932,12 +965,18 @@ class SystemDatabase:
932
965
  exported_inputs: Dict[str, str] = {}
933
966
  with self.engine.begin() as c:
934
967
  exported = 0
935
- local_batch_size = min(
936
- buffer_flush_batch_size, len(self._workflow_inputs_buffer)
937
- )
938
- while exported < local_batch_size:
939
- wf_id = next(iter(self._workflow_inputs_buffer))
940
- inputs = self._workflow_inputs_buffer.pop(wf_id)
968
+ input_iter = iter(list(self._workflow_inputs_buffer))
969
+ wf_id: Optional[str] = None
970
+ while (
971
+ exported < buffer_flush_batch_size
972
+ and (wf_id := next(input_iter, None)) is not None
973
+ ):
974
+ if wf_id not in self._exported_temp_txn_wf_status:
975
+ # Skip exporting inputs if the status has not been exported yet
976
+ continue
977
+ inputs = self._workflow_inputs_buffer.pop(wf_id, None)
978
+ if inputs is None:
979
+ continue
941
980
  exported_inputs[wf_id] = inputs
942
981
  try:
943
982
  self.update_workflow_inputs(wf_id, inputs, conn=c)
@@ -972,6 +1011,7 @@ class SystemDatabase:
972
1011
  def buffer_workflow_inputs(self, workflow_id: str, inputs: str) -> None:
973
1012
  # inputs is a serialized WorkflowInputs string
974
1013
  self._workflow_inputs_buffer[workflow_id] = inputs
1014
+ self._temp_txn_wf_ids.add(workflow_id)
975
1015
 
976
1016
  @property
977
1017
  def _is_buffers_empty(self) -> bool:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dbos
3
- Version: 0.5.0a4
3
+ Version: 0.5.0a7
4
4
  Summary: A Python framework for backends that scale
5
5
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
6
6
  License: MIT
@@ -1,20 +1,20 @@
1
- dbos-0.5.0a4.dist-info/METADATA,sha256=l_v0uEPtP7uqtxzLfs6FCFNe3jLyhnG1of_HACqUUyo,5420
2
- dbos-0.5.0a4.dist-info/WHEEL,sha256=rSwsxJWe3vzyR5HCwjWXQruDgschpei4h_giTm0dJVE,90
3
- dbos-0.5.0a4.dist-info/entry_points.txt,sha256=3PmOPbM4FYxEmggRRdJw0oAsiBzKR8U0yx7bmwUmMOM,39
4
- dbos-0.5.0a4.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
1
+ dbos-0.5.0a7.dist-info/METADATA,sha256=LdnWhAfO7Lnl85g-bvhXhoFYrCHkz7Jxmj_yqEKpPVc,5420
2
+ dbos-0.5.0a7.dist-info/WHEEL,sha256=rSwsxJWe3vzyR5HCwjWXQruDgschpei4h_giTm0dJVE,90
3
+ dbos-0.5.0a7.dist-info/entry_points.txt,sha256=3PmOPbM4FYxEmggRRdJw0oAsiBzKR8U0yx7bmwUmMOM,39
4
+ dbos-0.5.0a7.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
5
5
  dbos/__init__.py,sha256=X1LdP36NomDtvPfFwoMNtgXf81TO05jj7vltsp79UUw,787
6
6
  dbos/admin_sever.py,sha256=KtzH6aKyskCm4h3yulpy9jb5PIqRlYI2sjctw5mvaKY,3395
7
- dbos/application_database.py,sha256=cBSfyB-5KGFBtigLpFPWEAgmrKONAUnG1825vXPifBk,5411
7
+ dbos/application_database.py,sha256=1K3kE96BgGi_QWOd2heXluyNTwFAwlUVuAR6JKKUqf0,5659
8
8
  dbos/cli.py,sha256=QnbGtZ8S963q3iyFvXNBcL4DB35r4SFMarlb5DRqN6M,7915
9
9
  dbos/context.py,sha256=JZMV2RtSpTK7lnyyWxeBmGPwrZSB00XZEP6R6MT9ygQ,15690
10
- dbos/core.py,sha256=8KkmHYQtxbtfFI_2sAf4DYmiznwYxLLhE6z-FLc3Gho,28675
10
+ dbos/core.py,sha256=HfKnPpIaQqIBAHzP2hD67aSIchTHp87NgD21CcujKkE,28300
11
11
  dbos/dbos-config.schema.json,sha256=azpfmoDZg7WfSy3kvIsk9iEiKB_-VZt03VEOoXJAkqE,5331
12
- dbos/dbos.py,sha256=HngS2BUWSbWPmloXGr-KE81BQ8dpZtlvOXM4tx4_Qhg,26246
12
+ dbos/dbos.py,sha256=-zrxmo_yN4vPTKQdyDtAcGlZI4-RV4PZBiIFayNSHyI,26342
13
13
  dbos/dbos_config.py,sha256=EkO0c0xaIM7_vAAqqnvNNEAKG5fOJbmmalqnZvaKYZA,5312
14
14
  dbos/decorators.py,sha256=lbPefsLK6Cya4cb7TrOcLglOpGT3pc6qjZdsQKlfZLg,629
15
15
  dbos/error.py,sha256=nBdLC4hxGO_K9V26YbDGOo7xi1CKuN4PsE_cBv7K8Cc,3798
16
16
  dbos/fastapi.py,sha256=ZFcMizyv3pizo5zf0sSF6U4GoR3rQH8LxGipkQIGHfU,2282
17
- dbos/logger.py,sha256=cfybbu6F1zsgYLEPW8D8V6h033u-YedLXnGMnQQM6-4,3341
17
+ dbos/logger.py,sha256=D-aFSZUCHBP34J1IZ5YNkTrJW-rDiH3py_v9jLU4Yrk,3565
18
18
  dbos/migrations/env.py,sha256=38SIGVbmn_VV2x2u1aHLcPOoWgZ84eCymf3g_NljmbU,1626
19
19
  dbos/migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
20
20
  dbos/migrations/versions/5c361fc04708_added_system_tables.py,sha256=QMgFMb0aLgC25YicsvPSr6AHRCA6Zd66hyaRUhwKzrQ,6404
@@ -28,7 +28,7 @@ dbos/scheduler/scheduler.py,sha256=uO4_9jmWW2rLv1ODL3lc1cE_37ZaVTgnvmFx_FAlN50,1
28
28
  dbos/schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
29
29
  dbos/schemas/application_database.py,sha256=q_Wr2XbiZNBYFkOtu7uKavo1T_cSOBblxKGHThYGGsY,962
30
30
  dbos/schemas/system_database.py,sha256=5V3vqnEzry0Hn7ZbVS9Gs_dJKia8uX8p7mGC82Ru8rk,4303
31
- dbos/system_database.py,sha256=h04PngaTdHxr1zfXcH6rSdac_vX0mSsD4SWHyOEWpJQ,38147
31
+ dbos/system_database.py,sha256=84c53iAel113SRb7DcgFJ8XQNWBhD4VrCRCb0s5Oe8Y,39635
32
32
  dbos/templates/hello/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
33
33
  dbos/templates/hello/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
34
34
  dbos/templates/hello/__package/main.py,sha256=hJgp3S14cseT7zWIZsPwjqdzwTCw1aLo8kPKsTvYz0Y,2976
@@ -42,4 +42,4 @@ dbos/templates/hello/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKs
42
42
  dbos/tracer.py,sha256=RPW9oxmX9tSc0Yq7O-FAhpQWBg1QT7Ni1Q06uwhtNDk,2237
43
43
  dbos/utils.py,sha256=hWj9iWDrby2cVEhb0pG-IdnrxLqP64NhkaWUXiLc8bA,402
44
44
  version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
45
- dbos-0.5.0a4.dist-info/RECORD,,
45
+ dbos-0.5.0a7.dist-info/RECORD,,
File without changes