dbos 0.27.0a10__py3-none-any.whl → 0.27.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only.

Potentially problematic release.


This version of dbos might be problematic. Click here for more details.

dbos/_admin_server.py CHANGED
@@ -66,11 +66,11 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
66
66
  elif self.path == _deactivate_path:
67
67
  if not AdminRequestHandler.is_deactivated:
68
68
  dbos_logger.info(
69
- f"Deactivating DBOS executor {GlobalParams.executor_id} with version {GlobalParams.app_version}. This executor will complete existing workflows but will not start new workflows."
69
+ f"Deactivating DBOS executor {GlobalParams.executor_id} with version {GlobalParams.app_version}. This executor will complete existing workflows but will not create new workflows."
70
70
  )
71
71
  AdminRequestHandler.is_deactivated = True
72
- # Stop all scheduled workflows, queues, and kafka loops
73
- for event in self.dbos.stop_events:
72
+ # Stop all event receivers (scheduler and Kafka threads)
73
+ for event in self.dbos.poller_stop_events:
74
74
  event.set()
75
75
  self.send_response(200)
76
76
  self._end_headers()
dbos/_core.py CHANGED
@@ -296,7 +296,7 @@ def _init_workflow(
296
296
 
297
297
  if workflow_deadline_epoch_ms is not None:
298
298
  evt = threading.Event()
299
- dbos.stop_events.append(evt)
299
+ dbos.background_thread_stop_events.append(evt)
300
300
 
301
301
  def timeout_func() -> None:
302
302
  try:
dbos/_dbos.py CHANGED
@@ -197,7 +197,7 @@ class DBOSRegistry:
197
197
  self, evt: threading.Event, func: Callable[..., Any], *args: Any, **kwargs: Any
198
198
  ) -> None:
199
199
  if self.dbos and self.dbos._launched:
200
- self.dbos.stop_events.append(evt)
200
+ self.dbos.poller_stop_events.append(evt)
201
201
  self.dbos._executor.submit(func, *args, **kwargs)
202
202
  else:
203
203
  self.pollers.append((evt, func, args, kwargs))
@@ -330,7 +330,10 @@ class DBOS:
330
330
  self._registry: DBOSRegistry = _get_or_create_dbos_registry()
331
331
  self._registry.dbos = self
332
332
  self._admin_server_field: Optional[AdminServer] = None
333
- self.stop_events: List[threading.Event] = []
333
+ # Stop internal background threads (queue thread, timeout threads, etc.)
334
+ self.background_thread_stop_events: List[threading.Event] = []
335
+ # Stop pollers (event receivers) that can create new workflows (scheduler, Kafka)
336
+ self.poller_stop_events: List[threading.Event] = []
334
337
  self.fastapi: Optional["FastAPI"] = fastapi
335
338
  self.flask: Optional["Flask"] = flask
336
339
  self._executor_field: Optional[ThreadPoolExecutor] = None
@@ -502,7 +505,7 @@ class DBOS:
502
505
 
503
506
  # Start the queue thread
504
507
  evt = threading.Event()
505
- self.stop_events.append(evt)
508
+ self.background_thread_stop_events.append(evt)
506
509
  bg_queue_thread = threading.Thread(
507
510
  target=queue_thread, args=(evt, self), daemon=True
508
511
  )
@@ -515,7 +518,7 @@ class DBOS:
515
518
  dbos_domain = os.environ.get("DBOS_DOMAIN", "cloud.dbos.dev")
516
519
  self.conductor_url = f"wss://{dbos_domain}/conductor/v1alpha1"
517
520
  evt = threading.Event()
518
- self.stop_events.append(evt)
521
+ self.background_thread_stop_events.append(evt)
519
522
  self.conductor_websocket = ConductorWebsocket(
520
523
  self,
521
524
  conductor_url=self.conductor_url,
@@ -527,7 +530,7 @@ class DBOS:
527
530
 
528
531
  # Grab any pollers that were deferred and start them
529
532
  for evt, func, args, kwargs in self._registry.pollers:
530
- self.stop_events.append(evt)
533
+ self.poller_stop_events.append(evt)
531
534
  poller_thread = threading.Thread(
532
535
  target=func, args=args, kwargs=kwargs, daemon=True
533
536
  )
@@ -583,7 +586,9 @@ class DBOS:
583
586
 
584
587
  def _destroy(self) -> None:
585
588
  self._initialized = False
586
- for event in self.stop_events:
589
+ for event in self.poller_stop_events:
590
+ event.set()
591
+ for event in self.background_thread_stop_events:
587
592
  event.set()
588
593
  self._background_event_loop.stop()
589
594
  if self._sys_db_field is not None:
dbos/_error.py CHANGED
@@ -134,12 +134,17 @@ class DBOSNotAuthorizedError(DBOSException):
134
134
  """Exception raised by DBOS role-based security when the user is not authorized to access a function."""
135
135
 
136
136
  def __init__(self, msg: str):
137
+ self.msg = msg
137
138
  super().__init__(
138
139
  msg,
139
140
  dbos_error_code=DBOSErrorCode.NotAuthorized.value,
140
141
  )
141
142
  self.status_code = 403
142
143
 
144
+ def __reduce__(self) -> Any:
145
+ # Tell jsonpickle how to reconstruct this object
146
+ return (self.__class__, (self.msg,))
147
+
143
148
 
144
149
  class DBOSMaxStepRetriesExceeded(DBOSException):
145
150
  """Exception raised when a step was retried the maximimum number of times without success."""
@@ -185,11 +190,21 @@ class DBOSQueueDeduplicatedError(DBOSException):
185
190
  def __init__(
186
191
  self, workflow_id: str, queue_name: str, deduplication_id: str
187
192
  ) -> None:
193
+ self.workflow_id = workflow_id
194
+ self.queue_name = queue_name
195
+ self.deduplication_id = deduplication_id
188
196
  super().__init__(
189
197
  f"Workflow {workflow_id} was deduplicated due to an existing workflow in queue {queue_name} with deduplication ID {deduplication_id}.",
190
198
  dbos_error_code=DBOSErrorCode.QueueDeduplicated.value,
191
199
  )
192
200
 
201
+ def __reduce__(self) -> Any:
202
+ # Tell jsonpickle how to reconstruct this object
203
+ return (
204
+ self.__class__,
205
+ (self.workflow_id, self.queue_name, self.deduplication_id),
206
+ )
207
+
193
208
 
194
209
  #######################################
195
210
  ## BaseException
dbos/_recovery.py CHANGED
@@ -29,7 +29,7 @@ def startup_recovery_thread(
29
29
  ) -> None:
30
30
  """Attempt to recover local pending workflows on startup using a background thread."""
31
31
  stop_event = threading.Event()
32
- dbos.stop_events.append(stop_event)
32
+ dbos.background_thread_stop_events.append(stop_event)
33
33
  while not stop_event.is_set() and len(pending_workflows) > 0:
34
34
  try:
35
35
  for pending_workflow in list(pending_workflows):
dbos/_serialization.py CHANGED
@@ -1,8 +1,10 @@
1
1
  import types
2
- from typing import Any, Dict, Tuple, TypedDict
2
+ from typing import Any, Dict, Optional, Tuple, TypedDict
3
3
 
4
4
  import jsonpickle # type: ignore
5
5
 
6
+ from ._logger import dbos_logger
7
+
6
8
 
7
9
  class WorkflowInputs(TypedDict):
8
10
  args: Tuple[Any, ...]
@@ -51,5 +53,54 @@ def deserialize_args(serialized_data: str) -> WorkflowInputs:
51
53
 
52
54
  def deserialize_exception(serialized_data: str) -> Exception:
53
55
  """Deserialize JSON string back to a Python Exception using jsonpickle."""
54
- upo: Exception = jsonpickle.decode(serialized_data)
55
- return upo
56
+ exc: Exception = jsonpickle.decode(serialized_data)
57
+ return exc
58
+
59
+
60
+ def safe_deserialize(
61
+ workflow_id: str,
62
+ *,
63
+ serialized_input: Optional[str],
64
+ serialized_output: Optional[str],
65
+ serialized_exception: Optional[str],
66
+ ) -> tuple[Optional[WorkflowInputs], Optional[Any], Optional[Exception]]:
67
+ """
68
+ This function safely deserializes a workflow's recorded input and output/exception.
69
+ If any of them is not deserializable, it logs a warning and returns a string instead of throwing an exception.
70
+
71
+ This function is used in workflow introspection methods (get_workflows and get_queued_workflow)
72
+ to ensure errors related to nondeserializable objects are observable.
73
+ """
74
+ input: Optional[WorkflowInputs]
75
+ try:
76
+ input = (
77
+ deserialize_args(serialized_input) if serialized_input is not None else None
78
+ )
79
+ except Exception as e:
80
+ dbos_logger.warning(
81
+ f"Warning: input object could not be deserialized for workflow {workflow_id}, returning as string: {e}"
82
+ )
83
+ input = serialized_input # type: ignore
84
+ output: Optional[Any]
85
+ try:
86
+ output = (
87
+ deserialize(serialized_output) if serialized_output is not None else None
88
+ )
89
+ except Exception as e:
90
+ dbos_logger.warning(
91
+ f"Warning: output object could not be deserialized for workflow {workflow_id}, returning as string: {e}"
92
+ )
93
+ output = serialized_output
94
+ exception: Optional[Exception]
95
+ try:
96
+ exception = (
97
+ deserialize_exception(serialized_exception)
98
+ if serialized_exception is not None
99
+ else None
100
+ )
101
+ except Exception as e:
102
+ dbos_logger.warning(
103
+ f"Warning: exception object could not be deserialized for workflow {workflow_id}, returning as string: {e}"
104
+ )
105
+ exception = serialized_exception # type: ignore
106
+ return input, output, exception
dbos/_sys_db.py CHANGED
@@ -901,13 +901,15 @@ class SystemDatabase:
901
901
  info.app_version = row[14]
902
902
  info.app_id = row[15]
903
903
 
904
- inputs = _serialization.deserialize_args(row[16])
905
- if inputs is not None:
906
- info.input = inputs
907
- if info.status == WorkflowStatusString.SUCCESS.value:
908
- info.output = _serialization.deserialize(row[17])
909
- elif info.status == WorkflowStatusString.ERROR.value:
910
- info.error = _serialization.deserialize_exception(row[18])
904
+ inputs, output, exception = _serialization.safe_deserialize(
905
+ info.workflow_id,
906
+ serialized_input=row[16],
907
+ serialized_output=row[17],
908
+ serialized_exception=row[18],
909
+ )
910
+ info.input = inputs
911
+ info.output = output
912
+ info.error = exception
911
913
 
912
914
  infos.append(info)
913
915
  return infos
@@ -1007,13 +1009,15 @@ class SystemDatabase:
1007
1009
  info.app_version = row[14]
1008
1010
  info.app_id = row[15]
1009
1011
 
1010
- inputs = _serialization.deserialize_args(row[16])
1011
- if inputs is not None:
1012
- info.input = inputs
1013
- if info.status == WorkflowStatusString.SUCCESS.value:
1014
- info.output = _serialization.deserialize(row[17])
1015
- elif info.status == WorkflowStatusString.ERROR.value:
1016
- info.error = _serialization.deserialize_exception(row[18])
1012
+ inputs, output, exception = _serialization.safe_deserialize(
1013
+ info.workflow_id,
1014
+ serialized_input=row[16],
1015
+ serialized_output=row[17],
1016
+ serialized_exception=row[18],
1017
+ )
1018
+ info.input = inputs
1019
+ info.output = output
1020
+ info.error = exception
1017
1021
 
1018
1022
  infos.append(info)
1019
1023
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dbos
3
- Version: 0.27.0a10
3
+ Version: 0.27.1
4
4
  Summary: Ultra-lightweight durable execution in Python
5
5
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
6
6
  License: MIT
@@ -1,23 +1,23 @@
1
- dbos-0.27.0a10.dist-info/METADATA,sha256=Gfag7_gwehHpSgpOJistZUvK08cQVC4C0ignrfpmvSM,5554
2
- dbos-0.27.0a10.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
3
- dbos-0.27.0a10.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
4
- dbos-0.27.0a10.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
1
+ dbos-0.27.1.dist-info/METADATA,sha256=wKicsi_27L1ILS4hNRfiLMToG59MKgfaWkIcEFFoHIE,5551
2
+ dbos-0.27.1.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
3
+ dbos-0.27.1.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
4
+ dbos-0.27.1.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
5
5
  dbos/__init__.py,sha256=-FdBlOlr-f2tY__C23J4v22MoCAXqcDN_-zXsJXdoZ0,1005
6
6
  dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
7
- dbos/_admin_server.py,sha256=NG0JWQQer9kEslPNAA0dBv-O262sjarz7ZSlv8yird0,9053
7
+ dbos/_admin_server.py,sha256=CM02jyC9H21fM7Pjn1BhPxNwAOV7CXmMJd0SdaNq8dQ,9062
8
8
  dbos/_app_db.py,sha256=3j8_5-MlSDY0otLRszFE-GfenU6JC20fcfSL-drSNYk,11800
9
9
  dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
10
10
  dbos/_client.py,sha256=Id-jzAUH6JMN-9WmAGyo0vm-nc0URjNIVwA2iKnCN5Q,13418
11
11
  dbos/_conductor/conductor.py,sha256=HYzVL29IMMrs2Mnms_7cHJynCnmmEN5SDQOMjzn3UoU,16840
12
12
  dbos/_conductor/protocol.py,sha256=zEKIuOQdIaSduNqfZKpo8PSD9_1oNpKIPnBNCu3RUyE,6681
13
13
  dbos/_context.py,sha256=5aJHOjh6-2Zc7Fwzw924Vg0utLEkaR-oBMRdz3cE95k,23680
14
- dbos/_core.py,sha256=7zhdO-VfZe84wgOzBVsliqO-BI20OzcLTFqvrGyxttw,48425
14
+ dbos/_core.py,sha256=iTcyDhi2nNgYaZqEOjlggjg9HyqKFliaROpjzyJOsik,48443
15
15
  dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
16
- dbos/_dbos.py,sha256=ENDQ6Xi4MoKrjXoCRlk1B64yZP7D-MyDUjUlOTRsw9I,48314
16
+ dbos/_dbos.py,sha256=I3apaZar0137U8Edlnoxi8Zgh1Czy9AsDKlROG0kQu8,48701
17
17
  dbos/_dbos_config.py,sha256=L0Z0OOB5FoPM9g-joZqXGeJnlxWQsEUtgPtgtg9Uf48,21732
18
18
  dbos/_debug.py,sha256=MNlQVZ6TscGCRQeEEL0VE8Uignvr6dPeDDDefS3xgIE,1823
19
19
  dbos/_docker_pg_helper.py,sha256=NmcgqmR5rQA_4igfeqh8ugNT2z3YmoOvuep_MEtxTiY,5854
20
- dbos/_error.py,sha256=FOvv40rCWezx9J-0z45ScPYHO8WpmI2IHErZ8Wl1NU4,7510
20
+ dbos/_error.py,sha256=EN4eVBjMT3k7O7hfqJl6mIf4sxWPsiAOM086yhcGH_g,8012
21
21
  dbos/_event_loop.py,sha256=NmaLbEQFfEK36S_0KhVD39YdYrGce3qSKCTJ-5RqKQ0,2136
22
22
  dbos/_fastapi.py,sha256=PhaKftbApHnjtYEOw0EYna_3K0cmz__J9of7mRJWzu4,3704
23
23
  dbos/_flask.py,sha256=DZKUZR5-xOzPI7tYZ53r2PvvHVoAb8SYwLzMVFsVfjI,2608
@@ -38,7 +38,7 @@ dbos/_migrations/versions/eab0cc1d9a14_job_queue.py,sha256=uvhFOtqbBreCePhAxZfIT
38
38
  dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py,sha256=m90Lc5YH0ZISSq1MyxND6oq3RZrZKrIqEsZtwJ1jWxA,1049
39
39
  dbos/_outcome.py,sha256=EXxBg4jXCVJsByDQ1VOCIedmbeq_03S6d-p1vqQrLFU,6810
40
40
  dbos/_queue.py,sha256=aKCGahWBGJOLOv5PCOOId96Va3YQ4ICuHWXy-eQXohE,3526
41
- dbos/_recovery.py,sha256=98Py7icfytyIELJ54gIsdvmURBvTb0HmWaxEAuYL0dc,2546
41
+ dbos/_recovery.py,sha256=jVMexjfCCNopzyn8gVQzJCmGJaP9G3C1EFaoCQ_Nh7g,2564
42
42
  dbos/_registrations.py,sha256=EZzG3ZfYmWA2bHX2hpnSIQ3PTi3-cXsvbcmXjyOusMk,7302
43
43
  dbos/_request.py,sha256=cX1B3Atlh160phgS35gF1VEEV4pD126c9F3BDgBmxZU,929
44
44
  dbos/_roles.py,sha256=iOsgmIAf1XVzxs3gYWdGRe1B880YfOw5fpU7Jwx8_A8,2271
@@ -46,8 +46,8 @@ dbos/_scheduler.py,sha256=SR1oRZRcVzYsj-JauV2LA8JtwTkt8mru7qf6H1AzQ1U,2027
46
46
  dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
47
47
  dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
48
48
  dbos/_schemas/system_database.py,sha256=wLqrhApNqrwZC1SdUxi_ca0y_66WzKaaBOxvND4_bdg,5738
49
- dbos/_serialization.py,sha256=YCYv0qKAwAZ1djZisBC7khvKqG-5OcIv9t9EC5PFIog,1743
50
- dbos/_sys_db.py,sha256=CJf-PgAerEZdbCe9ZJOJnjkrqPcflUz04KFJaGRMm7Q,82389
49
+ dbos/_serialization.py,sha256=bWuwhXSQcGmiazvhJHA5gwhrRWxtmFmcCFQSDJnqqkU,3666
50
+ dbos/_sys_db.py,sha256=caIbhOwAnfugGzhnJ5rOG2V_bXphD9tJ4Un37gnG47A,82281
51
51
  dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
52
52
  dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
53
53
  dbos/_templates/dbos-db-starter/__package/main.py,sha256=nJMN3ZD2lmwg4Dcgmiwqc-tQGuCJuJal2Xl85iA277U,2453
@@ -67,4 +67,4 @@ dbos/cli/cli.py,sha256=a3rUrHog5-e22KjjUPOuTjH20PmUgSP0amRpMd6LVJE,18882
67
67
  dbos/dbos-config.schema.json,sha256=8KcwJb_sQc4-6tQG2TLmjE_nratfrQa0qVLl9XPsvWE,6367
68
68
  dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
69
69
  version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
70
- dbos-0.27.0a10.dist-info/RECORD,,
70
+ dbos-0.27.1.dist-info/RECORD,,