dbos 0.27.0a10.tar.gz → 0.27.1.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dbos-0.27.0a10 → dbos-0.27.1}/PKG-INFO +1 -1
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_admin_server.py +3 -3
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_core.py +1 -1
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_dbos.py +11 -6
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_error.py +15 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_recovery.py +1 -1
- dbos-0.27.1/dbos/_serialization.py +106 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_sys_db.py +18 -14
- {dbos-0.27.0a10 → dbos-0.27.1}/pyproject.toml +1 -1
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_admin_server.py +54 -2
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_failures.py +46 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_scheduler.py +2 -1
- dbos-0.27.0a10/dbos/_serialization.py +0 -55
- {dbos-0.27.0a10 → dbos-0.27.1}/LICENSE +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/README.md +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/__init__.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/__main__.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_app_db.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_classproperty.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_client.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_conductor/conductor.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_conductor/protocol.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_context.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_croniter.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_dbos_config.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_debug.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_docker_pg_helper.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_event_loop.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_fastapi.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_flask.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_kafka.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_kafka_message.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_logger.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_migrations/env.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_migrations/script.py.mako +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_outcome.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_queue.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_registrations.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_request.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_roles.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_scheduler.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_schemas/__init__.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_schemas/application_database.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_schemas/system_database.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_templates/dbos-db-starter/__package/main.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_tracer.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_utils.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/_workflow_commands.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/cli/_github_init.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/cli/_template_init.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/cli/cli.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/dbos-config.schema.json +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/dbos/py.typed +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/__init__.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/atexit_no_ctor.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/atexit_no_launch.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/classdefs.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/client_collateral.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/client_worker.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/conftest.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/dupname_classdefs1.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/dupname_classdefsa.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/more_classdefs.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/queuedworkflow.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_async.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_classdecorators.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_client.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_concurrency.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_config.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_croniter.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_dbos.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_debug.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_docker_secrets.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_fastapi.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_fastapi_roles.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_flask.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_kafka.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_outcome.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_package.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_queue.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_schema_migration.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_singleton.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_spans.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_sqlalchemy.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_workflow_introspection.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/tests/test_workflow_management.py +0 -0
- {dbos-0.27.0a10 → dbos-0.27.1}/version/__init__.py +0 -0
dbos/_admin_server.py
@@ -66,11 +66,11 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
         elif self.path == _deactivate_path:
             if not AdminRequestHandler.is_deactivated:
                 dbos_logger.info(
-                    f"Deactivating DBOS executor {GlobalParams.executor_id} with version {GlobalParams.app_version}. This executor will complete existing workflows but will not …
+                    f"Deactivating DBOS executor {GlobalParams.executor_id} with version {GlobalParams.app_version}. This executor will complete existing workflows but will not create new workflows."
                 )
                 AdminRequestHandler.is_deactivated = True
-            # Stop all …
-            for event in self.dbos.…
+            # Stop all event receivers (scheduler and Kafka threads)
+            for event in self.dbos.poller_stop_events:
                 event.set()
             self.send_response(200)
             self._end_headers()

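For reference, the new behavior can be exercised directly; the sketch below mirrors the admin-server tests later in this diff (admin server on its default port 3001) and is illustrative rather than part of the package:

    # Ask a running DBOS admin server to deactivate this executor.
    # Port 3001 and the /deactivate path match the tests in this diff.
    import requests

    response = requests.get("http://localhost:3001/deactivate", timeout=5)
    assert response.status_code == 200  # executor now finishes existing work only
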
dbos/_dbos.py
@@ -197,7 +197,7 @@ class DBOSRegistry:
         self, evt: threading.Event, func: Callable[..., Any], *args: Any, **kwargs: Any
     ) -> None:
         if self.dbos and self.dbos._launched:
-            self.dbos.…
+            self.dbos.poller_stop_events.append(evt)
             self.dbos._executor.submit(func, *args, **kwargs)
         else:
             self.pollers.append((evt, func, args, kwargs))

@@ -330,7 +330,10 @@ class DBOS:
         self._registry: DBOSRegistry = _get_or_create_dbos_registry()
         self._registry.dbos = self
         self._admin_server_field: Optional[AdminServer] = None
-        …
+        # Stop internal background threads (queue thread, timeout threads, etc.)
+        self.background_thread_stop_events: List[threading.Event] = []
+        # Stop pollers (event receivers) that can create new workflows (scheduler, Kafka)
+        self.poller_stop_events: List[threading.Event] = []
         self.fastapi: Optional["FastAPI"] = fastapi
         self.flask: Optional["Flask"] = flask
         self._executor_field: Optional[ThreadPoolExecutor] = None

@@ -502,7 +505,7 @@ class DBOS:

         # Start the queue thread
         evt = threading.Event()
-        self.…
+        self.background_thread_stop_events.append(evt)
         bg_queue_thread = threading.Thread(
             target=queue_thread, args=(evt, self), daemon=True
         )

@@ -515,7 +518,7 @@ class DBOS:
             dbos_domain = os.environ.get("DBOS_DOMAIN", "cloud.dbos.dev")
             self.conductor_url = f"wss://{dbos_domain}/conductor/v1alpha1"
             evt = threading.Event()
-            self.…
+            self.background_thread_stop_events.append(evt)
             self.conductor_websocket = ConductorWebsocket(
                 self,
                 conductor_url=self.conductor_url,

@@ -527,7 +530,7 @@ class DBOS:

         # Grab any pollers that were deferred and start them
         for evt, func, args, kwargs in self._registry.pollers:
-            self.…
+            self.poller_stop_events.append(evt)
             poller_thread = threading.Thread(
                 target=func, args=args, kwargs=kwargs, daemon=True
             )

@@ -583,7 +586,9 @@ class DBOS:

     def _destroy(self) -> None:
         self._initialized = False
-        for event in self.…
+        for event in self.poller_stop_events:
+            event.set()
+        for event in self.background_thread_stop_events:
             event.set()
         self._background_event_loop.stop()
         if self._sys_db_field is not None:

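Taken together, the _dbos.py hunks split what was previously a single stop-event list in two: poller_stop_events for event receivers that can create new workflows (set on deactivate and again on destroy) and background_thread_stop_events for internal service threads (set only on destroy). A minimal standalone sketch of this pattern, assuming nothing about DBOS internals beyond what the hunks show:

    import threading
    import time

    poller_stop_events: list[threading.Event] = []
    background_thread_stop_events: list[threading.Event] = []

    def worker(stop: threading.Event) -> None:
        # Stand-in for a scheduler poller or the internal queue thread
        while not stop.is_set():
            time.sleep(0.1)

    def start(registry: list[threading.Event]) -> None:
        evt = threading.Event()
        registry.append(evt)
        threading.Thread(target=worker, args=(evt,), daemon=True).start()

    start(poller_stop_events)             # creates new workflows
    start(background_thread_stop_events)  # services existing workflows

    def deactivate() -> None:
        # Stop creating new work; existing work keeps draining
        for evt in poller_stop_events:
            evt.set()

    def destroy() -> None:
        deactivate()
        for evt in background_thread_stop_events:
            evt.set()
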
dbos/_error.py
@@ -134,12 +134,17 @@ class DBOSNotAuthorizedError(DBOSException):
     """Exception raised by DBOS role-based security when the user is not authorized to access a function."""

     def __init__(self, msg: str):
+        self.msg = msg
         super().__init__(
             msg,
             dbos_error_code=DBOSErrorCode.NotAuthorized.value,
         )
         self.status_code = 403

+    def __reduce__(self) -> Any:
+        # Tell jsonpickle how to reconstruct this object
+        return (self.__class__, (self.msg,))
+

 class DBOSMaxStepRetriesExceeded(DBOSException):
     """Exception raised when a step was retried the maximimum number of times without success."""
@@ -185,11 +190,21 @@ class DBOSQueueDeduplicatedError(DBOSException):
     def __init__(
         self, workflow_id: str, queue_name: str, deduplication_id: str
     ) -> None:
+        self.workflow_id = workflow_id
+        self.queue_name = queue_name
+        self.deduplication_id = deduplication_id
         super().__init__(
             f"Workflow {workflow_id} was deduplicated due to an existing workflow in queue {queue_name} with deduplication ID {deduplication_id}.",
             dbos_error_code=DBOSErrorCode.QueueDeduplicated.value,
         )

+    def __reduce__(self) -> Any:
+        # Tell jsonpickle how to reconstruct this object
+        return (
+            self.__class__,
+            (self.workflow_id, self.queue_name, self.deduplication_id),
+        )
+

 #######################################
 ## BaseException

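The __reduce__ overrides address a round-trip failure: jsonpickle rebuilds an exception by calling its class with the recorded arguments, which by default are exc.args (the formatted message) rather than the original constructor parameters. A standalone sketch of the failure and the fix, using hypothetical exception classes and assuming jsonpickle is installed:

    import jsonpickle

    class NoReduce(Exception):
        def __init__(self, code: int, detail: str) -> None:
            super().__init__(f"error {code}: {detail}")  # args is now one string

    class WithReduce(Exception):
        def __init__(self, code: int, detail: str) -> None:
            self.code = code
            self.detail = detail
            super().__init__(f"error {code}: {detail}")

        def __reduce__(self):
            # Rebuild from the original constructor arguments, not from args
            return (self.__class__, (self.code, self.detail))

    try:
        jsonpickle.decode(jsonpickle.encode(NoReduce(403, "forbidden")))
    except TypeError:
        pass  # __init__ is called with one argument instead of two

    restored = jsonpickle.decode(jsonpickle.encode(WithReduce(403, "forbidden")))
    assert isinstance(restored, WithReduce) and restored.code == 403
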
dbos/_recovery.py
@@ -29,7 +29,7 @@ def startup_recovery_thread(
 ) -> None:
     """Attempt to recover local pending workflows on startup using a background thread."""
     stop_event = threading.Event()
-    dbos.…
+    dbos.background_thread_stop_events.append(stop_event)
     while not stop_event.is_set() and len(pending_workflows) > 0:
         try:
             for pending_workflow in list(pending_workflows):

dbos-0.27.1/dbos/_serialization.py (new file)
@@ -0,0 +1,106 @@
+import types
+from typing import Any, Dict, Optional, Tuple, TypedDict
+
+import jsonpickle  # type: ignore
+
+from ._logger import dbos_logger
+
+
+class WorkflowInputs(TypedDict):
+    args: Tuple[Any, ...]
+    kwargs: Dict[str, Any]
+
+
+def _validate_item(data: Any) -> None:
+    if isinstance(data, (types.FunctionType, types.MethodType)):
+        raise TypeError("Serialized data item should not be a function")
+
+
+def serialize(data: Any) -> str:
+    """Serialize an object to a JSON string using jsonpickle."""
+    _validate_item(data)
+    encoded_data: str = jsonpickle.encode(data, unpicklable=True)
+    return encoded_data
+
+
+def serialize_args(data: WorkflowInputs) -> str:
+    """Serialize args to a JSON string using jsonpickle."""
+    arg: Any
+    for arg in data["args"]:
+        _validate_item(arg)
+    for arg in data["kwargs"].values():
+        _validate_item(arg)
+    encoded_data: str = jsonpickle.encode(data, unpicklable=True)
+    return encoded_data
+
+
+def serialize_exception(data: Exception) -> str:
+    """Serialize an Exception object to a JSON string using jsonpickle."""
+    encoded_data: str = jsonpickle.encode(data, unpicklable=True)
+    return encoded_data
+
+
+def deserialize(serialized_data: str) -> Any:
+    """Deserialize a JSON string back to a Python object using jsonpickle."""
+    return jsonpickle.decode(serialized_data)
+
+
+def deserialize_args(serialized_data: str) -> WorkflowInputs:
+    """Deserialize a JSON string back to a Python object list using jsonpickle."""
+    args: WorkflowInputs = jsonpickle.decode(serialized_data)
+    return args
+
+
+def deserialize_exception(serialized_data: str) -> Exception:
+    """Deserialize JSON string back to a Python Exception using jsonpickle."""
+    exc: Exception = jsonpickle.decode(serialized_data)
+    return exc
+
+
+def safe_deserialize(
+    workflow_id: str,
+    *,
+    serialized_input: Optional[str],
+    serialized_output: Optional[str],
+    serialized_exception: Optional[str],
+) -> tuple[Optional[WorkflowInputs], Optional[Any], Optional[Exception]]:
+    """
+    This function safely deserializes a workflow's recorded input and output/exception.
+    If any of them is not deserializable, it logs a warning and returns a string instead of throwing an exception.
+
+    This function is used in workflow introspection methods (get_workflows and get_queued_workflow)
+    to ensure errors related to nondeserializable objects are observable.
+    """
+    input: Optional[WorkflowInputs]
+    try:
+        input = (
+            deserialize_args(serialized_input) if serialized_input is not None else None
+        )
+    except Exception as e:
+        dbos_logger.warning(
+            f"Warning: input object could not be deserialized for workflow {workflow_id}, returning as string: {e}"
+        )
+        input = serialized_input  # type: ignore
+    output: Optional[Any]
+    try:
+        output = (
+            deserialize(serialized_output) if serialized_output is not None else None
+        )
+    except Exception as e:
+        dbos_logger.warning(
+            f"Warning: output object could not be deserialized for workflow {workflow_id}, returning as string: {e}"
+        )
+        output = serialized_output
+    exception: Optional[Exception]
+    try:
+        exception = (
+            deserialize_exception(serialized_exception)
+            if serialized_exception is not None
+            else None
+        )
+    except Exception as e:
+        dbos_logger.warning(
+            f"Warning: exception object could not be deserialized for workflow {workflow_id}, returning as string: {e}"
+        )
+        exception = serialized_exception  # type: ignore
+    return input, output, exception
dbos/_sys_db.py
@@ -901,13 +901,15 @@ class SystemDatabase:
             info.app_version = row[14]
             info.app_id = row[15]

-            inputs = _serialization.…
-            …
+            inputs, output, exception = _serialization.safe_deserialize(
+                info.workflow_id,
+                serialized_input=row[16],
+                serialized_output=row[17],
+                serialized_exception=row[18],
+            )
+            info.input = inputs
+            info.output = output
+            info.error = exception

             infos.append(info)
         return infos
@@ -1007,13 +1009,15 @@ class SystemDatabase:
             info.app_version = row[14]
             info.app_id = row[15]

-            inputs = _serialization.…
-            …
+            inputs, output, exception = _serialization.safe_deserialize(
+                info.workflow_id,
+                serialized_input=row[16],
+                serialized_output=row[17],
+                serialized_exception=row[18],
+            )
+            info.input = inputs
+            info.output = output
+            info.error = exception

             infos.append(info)

tests/test_admin_server.py
@@ -3,6 +3,7 @@ import socket
 import threading
 import time
 import uuid
+from datetime import datetime

 import pytest
 import requests
@@ -65,8 +66,59 @@ def test_admin_endpoints(dbos: DBOS) -> None:
     response = requests.get("http://localhost:3001/deactivate", timeout=5)
     assert response.status_code == 200

-    for event in dbos.…
-        assert event.is_set()
+    for event in dbos.poller_stop_events:
+        assert event.is_set()
+
+
+def test_deactivate(dbos: DBOS, config: ConfigFile) -> None:
+    wf_counter: int = 0
+
+    queue = Queue("example-queue")
+
+    @DBOS.scheduled("* * * * * *")
+    @DBOS.workflow()
+    def test_workflow(scheduled: datetime, actual: datetime) -> None:
+        nonlocal wf_counter
+        wf_counter += 1
+
+    @DBOS.workflow()
+    def regular_workflow() -> int:
+        return 5
+
+    # Let the scheduled workflow run
+    time.sleep(2)
+    val = wf_counter
+    assert val > 0
+    # Deactivate--scheduled workflow should stop
+    response = requests.get("http://localhost:3001/deactivate", timeout=5)
+    assert response.status_code == 200
+    for event in dbos.poller_stop_events:
+        assert event.is_set()
+    # Verify the scheduled workflow does not run anymore
+    time.sleep(3)
+    assert wf_counter <= val + 1
+    # Enqueue a workflow, verify it still runs
+    assert queue.enqueue(regular_workflow).get_result() == 5
+
+    # Test deferred event receivers
+    DBOS.destroy(destroy_registry=True)
+    dbos = DBOS(config=config)
+
+    @DBOS.scheduled("* * * * * *")
+    @DBOS.workflow()
+    def deferred_workflow(scheduled: datetime, actual: datetime) -> None:
+        nonlocal wf_counter
+        wf_counter += 1
+
+    DBOS.launch()
+    assert len(dbos.poller_stop_events) > 0
+    for event in dbos.poller_stop_events:
+        assert not event.is_set()
+    # Deactivate--scheduled workflow should stop
+    response = requests.get("http://localhost:3001/deactivate", timeout=5)
+    assert response.status_code == 200
+    for event in dbos.poller_stop_events:
+        assert event.is_set()


 def test_admin_recovery(config: ConfigFile) -> None:
tests/test_failures.py
@@ -11,9 +11,16 @@ from dbos import DBOS, Queue, SetWorkflowID
 from dbos._error import (
     DBOSDeadLetterQueueError,
     DBOSMaxStepRetriesExceeded,
+    DBOSNotAuthorizedError,
+    DBOSQueueDeduplicatedError,
     DBOSUnexpectedStepError,
 )
 from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
+from dbos._serialization import (
+    deserialize_exception,
+    safe_deserialize,
+    serialize_exception,
+)
 from dbos._sys_db import WorkflowStatusString

 from .conftest import queue_entries_are_cleaned_up
@@ -434,3 +441,42 @@ def test_keyboardinterrupt_during_retries(dbos: DBOS) -> None:
     recovery_handles = DBOS._recover_pending_workflows()
     assert len(recovery_handles) == 1
     assert recovery_handles[0].get_result() == recovery_handles[0].workflow_id
+
+
+def test_error_serialization() -> None:
+    # Verify that each exception that can be thrown in a workflow
+    # is serializable and deserializable
+    # DBOSMaxStepRetriesExceeded
+    e: Exception = DBOSMaxStepRetriesExceeded("step", 1)
+    d = deserialize_exception(serialize_exception(e))
+    assert isinstance(d, DBOSMaxStepRetriesExceeded)
+    assert str(d) == str(e)
+    # DBOSNotAuthorizedError
+    e = DBOSNotAuthorizedError("no")
+    d = deserialize_exception(serialize_exception(e))
+    assert isinstance(d, DBOSNotAuthorizedError)
+    assert str(d) == str(e)
+    # DBOSQueueDeduplicatedError
+    e = DBOSQueueDeduplicatedError("id", "queue", "dedup")
+    d = deserialize_exception(serialize_exception(e))
+    assert isinstance(d, DBOSQueueDeduplicatedError)
+    assert str(d) == str(e)
+
+    # Test safe_deserialize
+    class BadException(Exception):
+        def __init__(self, one: int, two: int) -> None:
+            super().__init__(f"Message: {one}, {two}")
+
+    bad_exception = BadException(1, 2)
+    with pytest.raises(TypeError):
+        deserialize_exception(serialize_exception(bad_exception))
+    input, output, exception = safe_deserialize(
+        "my_id",
+        serialized_input=None,
+        serialized_exception=serialize_exception(bad_exception),
+        serialized_output=None,
+    )
+    assert input is None
+    assert output is None
+    assert isinstance(exception, str)
+    assert "Message: 1, 2" in exception
dbos-0.27.0a10/dbos/_serialization.py (deleted)
@@ -1,55 +0,0 @@
-import types
-from typing import Any, Dict, Tuple, TypedDict
-
-import jsonpickle  # type: ignore
-
-
-class WorkflowInputs(TypedDict):
-    args: Tuple[Any, ...]
-    kwargs: Dict[str, Any]
-
-
-def _validate_item(data: Any) -> None:
-    if isinstance(data, (types.FunctionType, types.MethodType)):
-        raise TypeError("Serialized data item should not be a function")
-
-
-def serialize(data: Any) -> str:
-    """Serialize an object to a JSON string using jsonpickle."""
-    _validate_item(data)
-    encoded_data: str = jsonpickle.encode(data, unpicklable=True)
-    return encoded_data
-
-
-def serialize_args(data: WorkflowInputs) -> str:
-    """Serialize args to a JSON string using jsonpickle."""
-    arg: Any
-    for arg in data["args"]:
-        _validate_item(arg)
-    for arg in data["kwargs"].values():
-        _validate_item(arg)
-    encoded_data: str = jsonpickle.encode(data, unpicklable=True)
-    return encoded_data
-
-
-def serialize_exception(data: Exception) -> str:
-    """Serialize an Exception object to a JSON string using jsonpickle."""
-    encoded_data: str = jsonpickle.encode(data, unpicklable=True)
-    return encoded_data
-
-
-def deserialize(serialized_data: str) -> Any:
-    """Deserialize a JSON string back to a Python object using jsonpickle."""
-    return jsonpickle.decode(serialized_data)
-
-
-def deserialize_args(serialized_data: str) -> WorkflowInputs:
-    """Deserialize a JSON string back to a Python object list using jsonpickle."""
-    args: WorkflowInputs = jsonpickle.decode(serialized_data)
-    return args
-
-
-def deserialize_exception(serialized_data: str) -> Exception:
-    """Deserialize JSON string back to a Python Exception using jsonpickle."""
-    upo: Exception = jsonpickle.decode(serialized_data)
-    return upo