dbos 1.1.0a3__py3-none-any.whl → 1.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbos/_admin_server.py +24 -4
- dbos/_app_db.py +0 -15
- dbos/_client.py +4 -3
- dbos/_context.py +6 -0
- dbos/_core.py +11 -4
- dbos/_dbos.py +22 -4
- dbos/_dbos_config.py +21 -39
- dbos/_error.py +14 -0
- dbos/_event_loop.py +10 -7
- dbos/_queue.py +6 -7
- dbos/_sys_db.py +201 -138
- dbos/_utils.py +33 -0
- dbos/_workflow_commands.py +1 -10
- dbos/cli/cli.py +2 -1
- {dbos-1.1.0a3.dist-info → dbos-1.2.0.dist-info}/METADATA +1 -1
- {dbos-1.1.0a3.dist-info → dbos-1.2.0.dist-info}/RECORD +19 -19
- {dbos-1.1.0a3.dist-info → dbos-1.2.0.dist-info}/WHEEL +0 -0
- {dbos-1.1.0a3.dist-info → dbos-1.2.0.dist-info}/entry_points.txt +0 -0
- {dbos-1.1.0a3.dist-info → dbos-1.2.0.dist-info}/licenses/LICENSE +0 -0
dbos/_admin_server.py
CHANGED
@@ -5,8 +5,9 @@ import re
 import threading
 from functools import partial
 from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
-from typing import TYPE_CHECKING, Any, List, TypedDict
+from typing import TYPE_CHECKING, Any, List, Optional, TypedDict

+from ._context import SetWorkflowID
 from ._error import DBOSException
 from ._logger import dbos_logger
 from ._recovery import recover_pending_workflows
@@ -141,7 +142,11 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
         try:
             data = json.loads(post_data.decode("utf-8"))
             start_step: int = data.get("start_step", 1)
-
+            new_workflow_id: Optional[str] = data.get("new_workflow_id")
+            application_version: Optional[str] = data.get("application_version")
+            self._handle_fork(
+                workflow_id, start_step, new_workflow_id, application_version
+            )
         except (json.JSONDecodeError, AttributeError) as e:
             self.send_response(500)
             self.send_header("Content-Type", "application/json")
@@ -191,9 +196,24 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
         self.end_headers()
         self.wfile.write(response_body)

-    def _handle_fork(
+    def _handle_fork(
+        self,
+        workflow_id: str,
+        start_step: int,
+        new_workflow_id: Optional[str],
+        application_version: Optional[str],
+    ) -> None:
         try:
-
+            print(f"Forking workflow {workflow_id} from step {start_step}")
+            if new_workflow_id is not None:
+                with SetWorkflowID(new_workflow_id):
+                    handle = self.dbos.fork_workflow(
+                        workflow_id, start_step, application_version=application_version
+                    )
+            else:
+                handle = self.dbos.fork_workflow(
+                    workflow_id, start_step, application_version=application_version
+                )
             response_body = json.dumps(
                 {
                     "workflow_id": handle.workflow_id,
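Both new request fields are optional: `new_workflow_id` pins the forked workflow's ID via `SetWorkflowID`, and `application_version` pins the code version the fork runs on. A minimal sketch of calling the endpoint follows; the route and port are assumptions (not shown in this diff), while the JSON fields and the `workflow_id` response key come from the hunks above:

```python
import json
from urllib import request

# Route and port are assumptions; the request/response fields match the diff.
payload = json.dumps(
    {
        "start_step": 3,
        "new_workflow_id": "fork-of-wf-123",  # optional: choose the fork's workflow ID
        "application_version": "v2",  # optional: pin the code version for the fork
    }
).encode("utf-8")

req = request.Request(
    "http://localhost:3001/workflows/wf-123/fork",
    data=payload,
    headers={"Content-Type": "application/json"},
    method="POST",
)
with request.urlopen(req) as resp:
    print(json.loads(resp.read()))  # e.g. {"workflow_id": "fork-of-wf-123"}
```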
dbos/_app_db.py
CHANGED
@@ -216,21 +216,6 @@ class ApplicationDatabase:
             for row in rows
         ]

-    def get_max_function_id(self, workflow_uuid: str) -> Optional[int]:
-        with self.engine.begin() as conn:
-            max_function_id_row = conn.execute(
-                sa.select(
-                    sa.func.max(ApplicationSchema.transaction_outputs.c.function_id)
-                ).where(
-                    ApplicationSchema.transaction_outputs.c.workflow_uuid
-                    == workflow_uuid
-                )
-            ).fetchone()
-
-            max_function_id = max_function_id_row[0] if max_function_id_row else None
-
-            return max_function_id
-
     def clone_workflow_transactions(
         self, src_workflow_id: str, forked_workflow_id: str, start_step: int
     ) -> None:
dbos/_client.py
CHANGED
@@ -3,8 +3,6 @@ import sys
 import uuid
 from typing import Any, Generic, List, Optional, TypedDict, TypeVar

-from sqlalchemy import URL
-
 from dbos._app_db import ApplicationDatabase
 from dbos._context import MaxPriority, MinPriority

@@ -15,6 +13,7 @@ else:

 from dbos import _serialization
 from dbos._dbos import WorkflowHandle, WorkflowHandleAsync
+from dbos._dbos_config import is_valid_database_url
 from dbos._error import DBOSException, DBOSNonExistentWorkflowError
 from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
 from dbos._serialization import WorkflowInputs
@@ -99,6 +98,7 @@ class WorkflowHandleClientAsyncPolling(Generic[R]):

 class DBOSClient:
     def __init__(self, database_url: str, *, system_database: Optional[str] = None):
+        assert is_valid_database_url(database_url)
         # We only create database connections but do not run migrations
         self._sys_db = SystemDatabase(
             database_url=database_url,
@@ -109,6 +109,7 @@ class DBOSClient:
             },
             sys_db_name=system_database,
         )
+        self._sys_db.check_connection()
         self._app_db = ApplicationDatabase(
             database_url=database_url,
             engine_kwargs={
@@ -231,7 +232,7 @@ class DBOSClient:
             "workflow_deadline_epoch_ms": None,
         }
         with self._sys_db.engine.begin() as conn:
-            self._sys_db.
+            self._sys_db._insert_workflow_status(
                 status, conn, max_recovery_attempts=None
            )
         self._sys_db.send(status["workflow_uuid"], 0, destination_id, message, topic)
dbos/_context.py
CHANGED
@@ -392,6 +392,7 @@ class SetWorkflowTimeout:
             else None
         )
         self.saved_workflow_timeout: Optional[int] = None
+        self.saved_workflow_deadline_epoch_ms: Optional[int] = None

     def __enter__(self) -> SetWorkflowTimeout:
         # Code to create a basic context
@@ -402,6 +403,8 @@ class SetWorkflowTimeout:
         ctx = assert_current_dbos_context()
         self.saved_workflow_timeout = ctx.workflow_timeout_ms
         ctx.workflow_timeout_ms = self.workflow_timeout_ms
+        self.saved_workflow_deadline_epoch_ms = ctx.workflow_deadline_epoch_ms
+        ctx.workflow_deadline_epoch_ms = None
         return self

     def __exit__(
@@ -411,6 +414,9 @@ class SetWorkflowTimeout:
         traceback: Optional[TracebackType],
     ) -> Literal[False]:
         assert_current_dbos_context().workflow_timeout_ms = self.saved_workflow_timeout
+        assert_current_dbos_context().workflow_deadline_epoch_ms = (
+            self.saved_workflow_deadline_epoch_ms
+        )
         # Code to clean up the basic context if we created it
         if self.created_ctx:
             _clear_local_dbos_context()
dbos/_core.py
CHANGED
@@ -20,8 +20,10 @@ from typing import (
     cast,
 )

+import psycopg
+
 from dbos._outcome import Immediate, NoResult, Outcome, Pending
-from dbos._utils import GlobalParams
+from dbos._utils import GlobalParams, retriable_postgres_exception

 from ._app_db import ApplicationDatabase, TransactionResultInternal

@@ -602,7 +604,6 @@ async def start_workflow_async(
     *args: P.args,
     **kwargs: P.kwargs,
 ) -> "WorkflowHandleAsync[R]":
-
     # If the function has a class, add the class object as its first argument
     fself: Optional[object] = None
     if hasattr(func, "__self__"):
@@ -932,12 +933,18 @@ def decorate_transaction(
                     )
                     break
                 except DBAPIError as dbapi_error:
-
+                    driver_error = cast(
+                        Optional[psycopg.OperationalError], dbapi_error.orig
+                    )
+                    if retriable_postgres_exception(dbapi_error) or (
+                        driver_error is not None
+                        and driver_error.sqlstate == "40001"
+                    ):
                         # Retry on serialization failure
                         span = ctx.get_current_span()
                         if span:
                             span.add_event(
-                                "Transaction
+                                "Transaction Failure",
                                 {"retry_wait_seconds": retry_wait_seconds},
                             )
                         time.sleep(retry_wait_seconds)
dbos/_dbos.py
CHANGED
@@ -90,10 +90,8 @@ from ._context import (
 from ._dbos_config import (
     ConfigFile,
     DBOSConfig,
-    check_config_consistency,
     overwrite_config,
     process_config,
-    set_env_vars,
     translate_dbos_config_to_config_file,
 )
 from ._error import (
@@ -299,6 +297,7 @@ class DBOS:

         self._launched: bool = False
         self._debug_mode: bool = False
+        self._configured_threadpool: bool = False
         self._sys_db_field: Optional[SystemDatabase] = None
         self._app_db_field: Optional[ApplicationDatabase] = None
         self._registry: DBOSRegistry = _get_or_create_dbos_registry()
@@ -323,14 +322,12 @@
         unvalidated_config = translate_dbos_config_to_config_file(config)
         if os.environ.get("DBOS__CLOUD") == "true":
             unvalidated_config = overwrite_config(unvalidated_config)
-            check_config_consistency(name=unvalidated_config["name"])

         if unvalidated_config is not None:
             self._config: ConfigFile = process_config(data=unvalidated_config)
         else:
             raise ValueError("No valid configuration was loaded.")

-        set_env_vars(self._config)
         config_logger(self._config)
         dbos_tracer.config(self._config)
         dbos_logger.info(f"Initializing DBOS (v{GlobalParams.dbos_version})")
@@ -719,6 +716,7 @@
         **kwargs: P.kwargs,
     ) -> WorkflowHandleAsync[R]:
         """Invoke a workflow function on the event loop, returning a handle to the ongoing execution."""
+        await cls._configure_asyncio_thread_pool()
         return await start_workflow_async(
             _get_dbos_instance(), func, None, True, *args, **kwargs
         )
@@ -736,6 +734,7 @@
     async def get_workflow_status_async(
         cls, workflow_id: str
     ) -> Optional[WorkflowStatus]:
+        await cls._configure_asyncio_thread_pool()
         """Return the status of a workflow execution."""
         return await asyncio.to_thread(cls.get_workflow_status, workflow_id)

@@ -757,6 +756,7 @@
     ) -> WorkflowHandleAsync[R]:
         """Return a `WorkflowHandle` for a workflow execution."""
         dbos = _get_dbos_instance()
+        await cls._configure_asyncio_thread_pool()
         if existing_workflow:
             stat = await dbos.get_workflow_status_async(workflow_id)
             if stat is None:
@@ -775,6 +775,7 @@
         cls, destination_id: str, message: Any, topic: Optional[str] = None
     ) -> None:
         """Send a message to a workflow execution."""
+        await cls._configure_asyncio_thread_pool()
         await asyncio.to_thread(lambda: DBOS.send(destination_id, message, topic))

     @classmethod
@@ -797,6 +798,7 @@
         This function is to be called from within a workflow.
         `recv_async` will return the message sent on `topic`, asyncronously waiting if necessary.
         """
+        await cls._configure_asyncio_thread_pool()
         return await asyncio.to_thread(lambda: DBOS.recv(topic, timeout_seconds))

     @classmethod
@@ -835,6 +837,7 @@
         It is important to use `DBOS.sleep` or `DBOS.sleep_async` (as opposed to any other sleep) within workflows,
         as the DBOS sleep methods are durable and completed sleeps will be skipped during recovery.
         """
+        await cls._configure_asyncio_thread_pool()
         await asyncio.to_thread(lambda: DBOS.sleep(seconds))

     @classmethod
@@ -869,6 +872,7 @@
             value(Any): A serializable value to associate with the key

         """
+        await cls._configure_asyncio_thread_pool()
         await asyncio.to_thread(lambda: DBOS.set_event(key, value))

     @classmethod
@@ -901,6 +905,7 @@
             timeout_seconds(float): The amount of time to wait, in case `set_event` has not yet been called byt the workflow

         """
+        await cls._configure_asyncio_thread_pool()
         return await asyncio.to_thread(
             lambda: DBOS.get_event(workflow_id, key, timeout_seconds)
         )
@@ -929,6 +934,19 @@
             fn, "DBOS.cancelWorkflow"
         )

+    @classmethod
+    async def _configure_asyncio_thread_pool(cls) -> None:
+        """
+        Configure the thread pool for asyncio.to_thread.
+
+        This function is called before the first call to asyncio.to_thread.
+        """
+        if _get_dbos_instance()._configured_threadpool:
+            return
+        loop = asyncio.get_running_loop()
+        loop.set_default_executor(_get_dbos_instance()._executor)
+        _get_dbos_instance()._configured_threadpool = True
+
     @classmethod
     def resume_workflow(cls, workflow_id: str) -> WorkflowHandle[Any]:
         """Resume a workflow by ID."""
dbos/_dbos_config.py
CHANGED
@@ -329,17 +329,9 @@ def process_config(
     if data.get("database_url") is not None and data["database_url"] != "":
         # Parse the db string and check required fields
         assert data["database_url"] is not None
+        assert is_valid_database_url(data["database_url"])
+
         url = make_url(data["database_url"])
-        required_fields = [
-            ("username", "Username must be specified in the connection URL"),
-            ("password", "Password must be specified in the connection URL"),
-            ("host", "Host must be specified in the connection URL"),
-            ("database", "Database name must be specified in the connection URL"),
-        ]
-        for field_name, error_message in required_fields:
-            field_value = getattr(url, field_name, None)
-            if not field_value:
-                raise DBOSInitializationError(error_message)

         if not data["database"].get("sys_db_name"):
             assert url.database is not None
@@ -385,6 +377,9 @@
     if not silent and logs["logLevel"] == "INFO" or logs["logLevel"] == "DEBUG":
         log_url = make_url(data["database_url"]).render_as_string(hide_password=True)
         print(f"[bold blue]Using database connection string: {log_url}[/bold blue]")
+        print(
+            f"[bold blue]Database engine parameters: {data['database']['db_engine_kwargs']}[/bold blue]"
+        )

     # Return data as ConfigFile type
     return data
@@ -407,6 +402,7 @@ def configure_db_engine_parameters(
         "pool_timeout": 30,
         "max_overflow": 0,
         "pool_size": 20,
+        "pool_pre_ping": True,
     }
     # If user-provided kwargs are present, use them instead
     user_kwargs = data.get("db_engine_kwargs")
@@ -431,6 +427,21 @@
     data["sys_db_engine_kwargs"] = system_engine_kwargs


+def is_valid_database_url(database_url: str) -> bool:
+    url = make_url(database_url)
+    required_fields = [
+        ("username", "Username must be specified in the connection URL"),
+        ("password", "Password must be specified in the connection URL"),
+        ("host", "Host must be specified in the connection URL"),
+        ("database", "Database name must be specified in the connection URL"),
+    ]
+    for field_name, error_message in required_fields:
+        field_value = getattr(url, field_name, None)
+        if not field_value:
+            raise DBOSInitializationError(error_message)
+    return True
+
+
 def _is_valid_app_name(name: str) -> bool:
     name_len = len(name)
     if name_len < 3 or name_len > 30:
@@ -444,12 +455,6 @@ def _app_name_to_db_name(app_name: str) -> str:
     return name if not name[0].isdigit() else f"_{name}"


-def set_env_vars(config: ConfigFile) -> None:
-    for env, value in config.get("env", {}).items():
-        if value is not None:
-            os.environ[env] = str(value)
-
-
 def overwrite_config(provided_config: ConfigFile) -> ConfigFile:
     # Load the DBOS configuration file and force the use of:
     # 1. The database url provided by DBOS_DATABASE_URL
@@ -529,26 +534,3 @@
         del provided_config["env"]

     return provided_config
-
-
-def check_config_consistency(
-    *,
-    name: str,
-    config_file_path: str = DBOS_CONFIG_PATH,
-) -> None:
-    # First load the config file and check whether it is present
-    try:
-        config = load_config(config_file_path, silent=True, run_process_config=False)
-    except FileNotFoundError:
-        dbos_logger.debug(
-            f"No configuration file {config_file_path} found. Skipping consistency check with provided config."
-        )
-        return
-    except Exception as e:
-        raise e
-
-    # Check the name
-    if name != config["name"]:
-        raise DBOSInitializationError(
-            f"Provided app name '{name}' does not match the app name '{config['name']}' in {config_file_path}."
-        )
dbos/_error.py
CHANGED
@@ -62,6 +62,7 @@ class DBOSErrorCode(Enum):
     WorkflowCancelled = 10
     UnexpectedStep = 11
     QueueDeduplicated = 12
+    AwaitedWorkflowCancelled = 13
     ConflictingRegistrationError = 25


@@ -206,6 +207,19 @@ class DBOSQueueDeduplicatedError(DBOSException):
         )


+class DBOSAwaitedWorkflowCancelledError(DBOSException):
+    def __init__(self, workflow_id: str):
+        self.workflow_id = workflow_id
+        super().__init__(
+            f"Awaited workflow {workflow_id} was cancelled",
+            dbos_error_code=DBOSErrorCode.AwaitedWorkflowCancelled.value,
+        )
+
+    def __reduce__(self) -> Any:
+        # Tell jsonpickle how to reconstruct this object
+        return (self.__class__, (self.workflow_id,))
+
+
 #######################################
 ## BaseException
 #######################################
dbos/_event_loop.py
CHANGED
@@ -1,5 +1,6 @@
 import asyncio
 import threading
+from concurrent.futures import ThreadPoolExecutor
 from typing import Any, Coroutine, Optional, TypeVar


@@ -33,15 +34,17 @@ class BackgroundEventLoop:

     def _run_event_loop(self) -> None:
         self._loop = asyncio.new_event_loop()
-        asyncio.set_event_loop(self._loop)
+        with ThreadPoolExecutor(max_workers=64) as thread_pool:
+            self._loop.set_default_executor(thread_pool)
+            asyncio.set_event_loop(self._loop)

-        self._running = True
-        self._ready.set()  # Signal that the loop is ready
+            self._running = True
+            self._ready.set()  # Signal that the loop is ready

-        try:
-            self._loop.run_forever()
-        finally:
-            self._loop.close()
+            try:
+                self._loop.run_forever()
+            finally:
+                self._loop.close()

     async def _shutdown(self) -> None:
         if self._loop is None:
dbos/_queue.py
CHANGED
@@ -5,6 +5,7 @@ from typing import TYPE_CHECKING, Any, Callable, Coroutine, Optional, TypedDict
 from psycopg import errors
 from sqlalchemy.exc import OperationalError

+from dbos._logger import dbos_logger
 from dbos._utils import GlobalParams

 from ._core import P, R, execute_workflow_by_id, start_workflow, start_workflow_async
@@ -56,6 +57,8 @@ class Queue:
         from ._dbos import _get_or_create_dbos_registry

         registry = _get_or_create_dbos_registry()
+        if self.name in registry.queue_info_map:
+            dbos_logger.warning(f"Queue {name} has already been declared")
         registry.queue_info_map[self.name] = self

     def enqueue(
@@ -95,12 +98,8 @@ def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
             if not isinstance(
                 e.orig, (errors.SerializationFailure, errors.LockNotAvailable)
             ):
-                dbos.logger.warning(
-                    f"Exception encountered in queue thread: {traceback.format_exc()}"
-                )
+                dbos.logger.warning(f"Exception encountered in queue thread: {e}")
-        except Exception:
+        except Exception as e:
             if not stop_event.is_set():
                 # Only print the error if the thread is not stopping
-                dbos.logger.warning(
-                    f"Exception encountered in queue thread: {traceback.format_exc()}"
-                )
+                dbos.logger.warning(f"Exception encountered in queue thread: {e}")