dbos 2.3.0a1__tar.gz → 2.3.0a3__tar.gz
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Potentially problematic release.
This version of dbos might be problematic.
- {dbos-2.3.0a1 → dbos-2.3.0a3}/PKG-INFO +1 -1
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_core.py +0 -21
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_dbos_config.py +1 -2
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_kafka.py +6 -4
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_scheduler.py +5 -2
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_serialization.py +7 -3
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_sys_db_postgres.py +1 -1
- {dbos-2.3.0a1 → dbos-2.3.0a3}/pyproject.toml +1 -1
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_config.py +29 -35
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_dbos.py +60 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_failures.py +14 -1
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_kafka.py +50 -17
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_scheduler.py +13 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/LICENSE +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/README.md +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/__init__.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/__main__.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_admin_server.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_app_db.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_classproperty.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_client.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_conductor/conductor.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_conductor/protocol.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_context.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_croniter.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_dbos.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_debouncer.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_debug.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_docker_pg_helper.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_error.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_event_loop.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_fastapi.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_flask.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_kafka_message.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_logger.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_migration.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_outcome.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_queue.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_recovery.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_registrations.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_roles.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_schemas/__init__.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_schemas/application_database.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_schemas/system_database.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_sys_db.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_sys_db_sqlite.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_tracer.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_utils.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_workflow_commands.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/cli/_github_init.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/cli/_template_init.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/cli/cli.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/cli/migration.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/dbos-config.schema.json +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/py.typed +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/__init__.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/atexit_no_ctor.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/atexit_no_launch.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/classdefs.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/client_collateral.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/client_worker.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/conftest.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/dupname_classdefs1.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/dupname_classdefsa.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/more_classdefs.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/queuedworkflow.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/script_without_fastapi.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_admin_server.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_async.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_async_workflow_management.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_classdecorators.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_cli.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_client.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_concurrency.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_croniter.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_debouncer.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_debug.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_docker_secrets.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_fastapi.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_fastapi_roles.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_flask.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_outcome.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_package.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_queue.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_schema_migration.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_singleton.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_spans.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_sqlalchemy.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_streaming.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_workflow_introspection.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_workflow_management.py +0 -0
- {dbos-2.3.0a1 → dbos-2.3.0a3}/version/__init__.py +0 -0
{dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_core.py

@@ -93,14 +93,6 @@ TEMP_SEND_WF_NAME = "<temp>.temp_send_workflow"
 DEBOUNCER_WORKFLOW_NAME = "_dbos_debouncer_workflow"
 
 
-def check_is_in_coroutine() -> bool:
-    try:
-        asyncio.get_running_loop()
-        return True
-    except RuntimeError:
-        return False
-
-
 class WorkflowHandleFuture(Generic[R]):
 
     def __init__(self, workflow_id: str, future: Future[R], dbos: "DBOS"):
@@ -856,11 +848,6 @@ def workflow_wrapper(
             dbos._sys_db.record_get_result(workflow_id, serialized_r, None)
             return r
 
-        if check_is_in_coroutine() and not inspect.iscoroutinefunction(func):
-            dbos_logger.warning(
-                f"Sync workflow ({get_dbos_func_name(func)}) shouldn't be invoked from within another async function. Define it as async or use asyncio.to_thread instead."
-            )
-
        outcome = (
            wfOutcome.wrap(init_wf, dbos=dbos)
            .also(DBOSAssumeRole(rr))
@@ -1046,10 +1033,6 @@ def decorate_transaction(
            assert (
                ctx.is_workflow()
            ), "Transactions must be called from within workflows"
-            if check_is_in_coroutine():
-                dbos_logger.warning(
-                    f"Transaction function ({get_dbos_func_name(func)}) shouldn't be invoked from within another async function. Use asyncio.to_thread instead."
-                )
            with DBOSAssumeRole(rr):
                return invoke_tx(*args, **kwargs)
        else:
@@ -1194,10 +1177,6 @@ def decorate_step(
 
    @wraps(func)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
-        if check_is_in_coroutine() and not inspect.iscoroutinefunction(func):
-            dbos_logger.warning(
-                f"Sync step ({get_dbos_func_name(func)}) shouldn't be invoked from within another async function. Define it as async or use asyncio.to_thread instead."
-            )
        # If the step is called from a workflow, run it as a step.
        # Otherwise, run it as a normal function.
        ctx = get_local_dbos_context()
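The removed `check_is_in_coroutine` guard only ever emitted warnings, but the hazard it flagged is real: a synchronous workflow, step, or transaction called directly from a coroutine blocks the event loop, and the deleted messages themselves named the remedy (`asyncio.to_thread`). A minimal sketch of that remedy, independent of DBOS and with illustrative function names:

    import asyncio
    import time


    def sync_step() -> str:
        # Stands in for a synchronous DBOS step or transaction.
        time.sleep(1)  # blocks whichever thread runs it
        return "done"


    async def handler() -> str:
        # Calling sync_step() here directly would stall the event loop for a
        # second; offloading it to a worker thread keeps the loop responsive,
        # which is what the removed warning recommended.
        return await asyncio.to_thread(sync_step)


    if __name__ == "__main__":
        print(asyncio.run(handler()))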
{dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_dbos_config.py

@@ -444,6 +444,7 @@ def configure_db_engine_parameters(
 
    # Configure user database engine parameters
    app_engine_kwargs: dict[str, Any] = {
+        "connect_args": {"application_name": "dbos_transact"},
        "pool_timeout": 30,
        "max_overflow": 0,
        "pool_size": 20,
@@ -477,8 +478,6 @@ def is_valid_database_url(database_url: str) -> bool:
        return True
    url = make_url(database_url)
    required_fields = [
-        ("username", "Username must be specified in the connection URL"),
-        ("host", "Host must be specified in the connection URL"),
        ("database", "Database name must be specified in the connection URL"),
    ]
    for field_name, error_message in required_fields:
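The new default `connect_args` entry tags every pooled connection with the `application_name` `dbos_transact`, which Postgres then reports in `pg_stat_activity` and log lines. SQLAlchemy forwards `connect_args` verbatim to the DBAPI `connect()` call, so the effect is easy to observe; a sketch, assuming a reachable Postgres at a placeholder URL:

    import sqlalchemy as sa

    # Placeholder URL; point this at a real Postgres instance to try it.
    engine = sa.create_engine(
        "postgresql://user:pass@localhost:5432/mydb",
        connect_args={"application_name": "dbos_transact"},  # the kwarg now injected by default
        pool_size=20,
        max_overflow=0,
    )

    with engine.connect() as conn:
        # Every session opened by this engine reports the configured name.
        print(conn.execute(sa.text("SHOW application_name")).scalar())  # dbos_transact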
{dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_kafka.py

@@ -1,6 +1,6 @@
 import re
 import threading
-from typing import TYPE_CHECKING, Any, Callable, NoReturn
+from typing import TYPE_CHECKING, Any, Callable, Coroutine, NoReturn
 
 from confluent_kafka import Consumer, KafkaError, KafkaException
 
@@ -15,7 +15,9 @@ from ._kafka_message import KafkaMessage
 from ._logger import dbos_logger
 from ._registrations import get_dbos_func_name
 
-_KafkaConsumerWorkflow = Callable[[KafkaMessage], None]
+_KafkaConsumerWorkflow = (
+    Callable[[KafkaMessage], None] | Callable[[KafkaMessage], Coroutine[Any, Any, None]]
+)
 
 _kafka_queue: Queue
 _in_order_kafka_queues: dict[str, Queue] = {}
@@ -37,8 +39,8 @@ def _kafka_consumer_loop(
    in_order: bool,
 ) -> None:
 
-    def on_error(err: KafkaError) -> NoReturn:
-        raise KafkaException(err)
+    def on_error(err: KafkaError) -> None:
+        dbos_logger.error(f"Exception in Kafka consumer: {err}")
 
    config["error_cb"] = on_error
    if "auto.offset.reset" not in config:
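Widening `_KafkaConsumerWorkflow` into a union is what lets a consumer workflow be declared `async`; the new `test_kafka_async` later in this diff exercises it end to end. A trimmed sketch of the registration pattern, assuming a configured and launched DBOS instance (broker and topic are placeholders):

    from dbos import DBOS, KafkaMessage

    config = {
        "bootstrap.servers": "localhost:9092",  # placeholder broker
        "group.id": "dbos-test",
    }


    @DBOS.kafka_consumer(config, ["example-topic"])
    @DBOS.workflow()
    async def consume(msg: KafkaMessage) -> None:
        # Accepted now that the alias includes
        # Callable[[KafkaMessage], Coroutine[Any, Any, None]].
        print(msg.key, msg.value)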
{dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_scheduler.py

@@ -2,7 +2,7 @@ import random
 import threading
 import traceback
 from datetime import datetime, timezone
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING, Any, Callable, Coroutine
 
 from ._logger import dbos_logger
 from ._queue import Queue
@@ -14,7 +14,10 @@ from ._context import SetWorkflowID
 from ._croniter import croniter  # type: ignore
 from ._registrations import get_dbos_func_name
 
-ScheduledWorkflow = Callable[[datetime, datetime], None]
+ScheduledWorkflow = (
+    Callable[[datetime, datetime], None]
+    | Callable[[datetime, datetime], Coroutine[Any, Any, None]]
+)
 
 
 def scheduler_loop(
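The matching widening of `ScheduledWorkflow` admits coroutine handlers, and `test_scheduler.py` below gains a test for exactly this. A sketch of the now-typed usage, assuming a configured and launched DBOS instance (the cron string mirrors the new test):

    from datetime import datetime

    from dbos import DBOS


    @DBOS.scheduled("* * * * * *")  # every second, as in the new test
    @DBOS.workflow()
    async def tick(scheduled: datetime, actual: datetime) -> None:
        # Both timestamps are supplied by the scheduler loop; async handlers
        # are now covered by the ScheduledWorkflow union.
        print(f"scheduled={scheduled} actual={actual}")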
{dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_serialization.py

@@ -25,9 +25,13 @@ class Serializer(ABC):
 class DefaultSerializer(Serializer):
 
    def serialize(self, data: Any) -> str:
-        pickled_data: bytes = pickle.dumps(data)
-        encoded_data: str = base64.b64encode(pickled_data).decode("utf-8")
-        return encoded_data
+        try:
+            pickled_data: bytes = pickle.dumps(data)
+            encoded_data: str = base64.b64encode(pickled_data).decode("utf-8")
+            return encoded_data
+        except Exception as e:
+            dbos_logger.error(f"Error serializing object: {data}", exc_info=e)
+            raise
 
    def deserialize(cls, serialized_data: str) -> Any:
        pickled_data: bytes = base64.b64decode(serialized_data)
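With the `try`/`except` in place, a pickling failure is logged together with the offending object before the original exception propagates. The classic trigger, checked by the new `test_nonserializable_return` below, is a generator; a standalone sketch of the same serialize-and-reraise shape:

    import base64
    import pickle


    def serialize(data: object) -> str:
        # Mirrors DefaultSerializer.serialize after this change: log context
        # on failure, then re-raise so the caller still sees the real error.
        try:
            return base64.b64encode(pickle.dumps(data)).decode("utf-8")
        except Exception:
            print(f"Error serializing object: {data!r}")
            raise


    gen = (c for c in "val")  # generators cannot be pickled

    try:
        serialize(gen)
    except TypeError as e:
        print(f"raised as expected: {e}")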
{dbos-2.3.0a1 → dbos-2.3.0a3}/dbos/_sys_db_postgres.py

@@ -41,7 +41,7 @@ class PostgresSystemDatabase(SystemDatabase):
                parameters={"db_name": sysdb_name},
            ).scalar():
                dbos_logger.info(f"Creating system database {sysdb_name}")
-                conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
+                conn.execute(sa.text(f'CREATE DATABASE "{sysdb_name}"'))
            engine.dispose()
        else:
            # If we were provided an engine, validate it can connect
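Double-quoting the identifier is what permits system database names with characters such as `@`, `-`, and `.`, which the new `test_custom_database` exercises with `F8nny_dAtaB@s3@-n@m3.sqlite`. A standalone sketch under an assumed placeholder URL (`CREATE DATABASE` cannot run inside a transaction, hence AUTOCOMMIT):

    import sqlalchemy as sa

    sysdb_name = "F8nny_dAtaB@s3@-n@m3"  # unquoted, Postgres would reject this

    engine = sa.create_engine("postgresql://user:pass@localhost:5432/postgres")  # placeholder
    with engine.connect() as conn:
        conn = conn.execution_options(isolation_level="AUTOCOMMIT")
        # The double quotes make the odd characters part of a legal identifier.
        conn.execute(sa.text(f'CREATE DATABASE "{sysdb_name}"'))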
{dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_config.py

@@ -209,7 +209,7 @@ def test_process_config_full():
        "max_overflow": 0,
        "pool_size": 20,
        "pool_pre_ping": True,
-        "connect_args": {"connect_timeout": 1},
+        "connect_args": {"connect_timeout": 1, "application_name": "dbos_transact"},
    }
    assert configFile["database"]["sys_db_engine_kwargs"] == {
        "key": "value",
@@ -217,7 +217,7 @@ def test_process_config_full():
        "max_overflow": 0,
        "pool_size": 27,
        "pool_pre_ping": True,
-        "connect_args": {"connect_timeout": 1},
+        "connect_args": {"connect_timeout": 1, "application_name": "dbos_transact"},
    }
    assert configFile["runtimeConfig"]["start"] == ["python3 main.py"]
    assert configFile["runtimeConfig"]["admin_port"] == 8001
@@ -255,7 +255,7 @@ def test_process_config_system_database():
        "max_overflow": 0,
        "pool_size": 20,
        "pool_pre_ping": True,
-        "connect_args": {"connect_timeout": 1},
+        "connect_args": {"connect_timeout": 1, "application_name": "dbos_transact"},
    }
    assert configFile["database"]["sys_db_engine_kwargs"] == {
        "key": "value",
@@ -263,7 +263,7 @@ def test_process_config_system_database():
        "max_overflow": 0,
        "pool_size": 27,
        "pool_pre_ping": True,
-        "connect_args": {"connect_timeout": 1},
+        "connect_args": {"connect_timeout": 1, "application_name": "dbos_transact"},
    }
 
 
@@ -397,14 +397,14 @@ def test_configure_db_engine_parameters_defaults():
        "max_overflow": 0,
        "pool_size": 20,
        "pool_pre_ping": True,
-        "connect_args": {"connect_timeout": 10},
+        "connect_args": {"connect_timeout": 10, "application_name": "dbos_transact"},
    }
    assert data["sys_db_engine_kwargs"] == {
        "pool_timeout": 30,
        "max_overflow": 0,
        "pool_size": 20,
        "pool_pre_ping": True,
-        "connect_args": {"connect_timeout": 10},
+        "connect_args": {"connect_timeout": 10, "application_name": "dbos_transact"},
    }
 
 
@@ -419,14 +419,14 @@ def test_configure_db_engine_parameters_custom_sys_db_pool_sizes():
        "max_overflow": 0,
        "pool_size": 20,
        "pool_pre_ping": True,
-        "connect_args": {"connect_timeout": 10},
+        "connect_args": {"connect_timeout": 10, "application_name": "dbos_transact"},
    }
    assert data["sys_db_engine_kwargs"] == {
        "pool_timeout": 30,
        "max_overflow": 0,
        "pool_size": 35,
        "pool_pre_ping": True,
-        "connect_args": {"connect_timeout": 10},
+        "connect_args": {"connect_timeout": 10, "application_name": "dbos_transact"},
    }
 
 
@@ -440,7 +440,11 @@ def test_configure_db_engine_parameters_user_kwargs_override():
        "pool_pre_ping": True,
        "custom_param": "value",
        "pool_size": 50,
-        "connect_args": {"connect_timeout": 30, "key": "value"},
+        "connect_args": {
+            "connect_timeout": 30,
+            "key": "value",
+            "application_name": "dbos_transact",
+        },
    },
 }
 
@@ -453,7 +457,11 @@ def test_configure_db_engine_parameters_user_kwargs_override():
        "pool_pre_ping": True,
        "custom_param": "value",
        "pool_size": 50,
-        "connect_args": {"connect_timeout": 30, "key": "value"},
+        "connect_args": {
+            "connect_timeout": 30,
+            "key": "value",
+            "application_name": "dbos_transact",
+        },
    }
 
    # System engine kwargs should use system pool size but same user overrides
@@ -463,7 +471,11 @@ def test_configure_db_engine_parameters_user_kwargs_override():
        "pool_pre_ping": True,
        "custom_param": "value",
        "pool_size": 35,
-        "connect_args": {"connect_timeout": 30, "key": "value"},
+        "connect_args": {
+            "connect_timeout": 30,
+            "key": "value",
+            "application_name": "dbos_transact",
+        },
    }
 
 
@@ -487,7 +499,7 @@ def test_configure_db_engine_parameters_user_kwargs_and_db_url_connect_timeout():
        "pool_pre_ping": True,
        "custom_param": "value",
        "pool_size": 50,
-        "connect_args": {"connect_timeout": 22},
+        "connect_args": {"connect_timeout": 22, "application_name": "dbos_transact"},
    }
 
    # System engine kwargs should use system pool size but same user overrides
@@ -497,7 +509,7 @@ def test_configure_db_engine_parameters_user_kwargs_and_db_url_connect_timeout():
        "pool_pre_ping": True,
        "custom_param": "value",
        "pool_size": 50,
-        "connect_args": {"connect_timeout": 22},
+        "connect_args": {"connect_timeout": 22, "application_name": "dbos_transact"},
    }
 
 
@@ -556,7 +568,7 @@ def test_configure_db_engine_parameters_user_kwargs_mixed_params():
        "pool_pre_ping": True,
        "custom_param": "value",
        "pool_size": 50,
-        "connect_args": {"connect_timeout": 10},
+        "connect_args": {"connect_timeout": 10, "application_name": "dbos_transact"},
    }
 
    # System engine kwargs should use system pool size but same user overrides
@@ -566,7 +578,7 @@ def test_configure_db_engine_parameters_user_kwargs_mixed_params():
        "pool_pre_ping": True,
        "custom_param": "value",
        "pool_size": 50,
-        "connect_args": {"connect_timeout": 10},
+        "connect_args": {"connect_timeout": 10, "application_name": "dbos_transact"},
    }
 
 
@@ -581,14 +593,14 @@ def test_configure_db_engine_parameters_empty_user_kwargs():
        "max_overflow": 0,
        "pool_size": 20,
        "pool_pre_ping": True,
-        "connect_args": {"connect_timeout": 10},
+        "connect_args": {"connect_timeout": 10, "application_name": "dbos_transact"},
    }
    assert data["sys_db_engine_kwargs"] == {
        "pool_timeout": 30,
        "max_overflow": 0,
        "pool_size": 20,
        "pool_pre_ping": True,
-        "connect_args": {"connect_timeout": 10},
+        "connect_args": {"connect_timeout": 10, "application_name": "dbos_transact"},
    }
 
 
@@ -598,24 +610,6 @@ def test_configure_db_engine_parameters_empty_user_kwargs():
 
 
 def test_process_config_with_wrong_db_url():
-    # Missing username
-    config: ConfigFile = {
-        "name": "some-app",
-        "database_url": "postgres://:password@h:1234/dbname",
-    }
-    with pytest.raises(DBOSInitializationError) as exc_info:
-        process_config(data=config)
-    assert "Username must be specified in the connection URL" in str(exc_info.value)
-
-    # Missing host
-    config: ConfigFile = {
-        "name": "some-app",
-        "database_url": "postgres://user:password@:1234/dbname",
-    }
-    with pytest.raises(DBOSInitializationError) as exc_info:
-        process_config(data=config)
-    assert "Host must be specified in the connection URL" in str(exc_info.value)
-
    # Missing dbname
    config: ConfigFile = {
        "name": "some-app",
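Dropping the username and host checks relaxes `is_valid_database_url`: the URL `postgres://:password@h:1234/dbname` from the deleted test case now passes, and only the database name remains mandatory. A quick way to see which components SQLAlchemy parses out of such a URL:

    from sqlalchemy import make_url

    # Rejected before this change ("Username must be specified in the
    # connection URL"); now only the database component is validated.
    url = make_url("postgres://:password@h:1234/dbname")
    print(repr(url.username))  # empty; no longer an error
    print(repr(url.host))      # 'h'
    print(repr(url.database))  # 'dbname'; still required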
{dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_dbos.py

@@ -37,6 +37,7 @@ from dbos._error import (
 from dbos._schemas.system_database import SystemSchema
 from dbos._sys_db import GetWorkflowsInput
 from dbos._utils import GlobalParams
+from tests.conftest import using_sqlite
 
 
 def test_simple_workflow(dbos: DBOS) -> None:
@@ -1796,6 +1797,65 @@ def test_without_appdb(config: DBOSConfig, cleanup_test_databases: None) -> None
        assert s["function_name"] == step.__qualname__
 
 
+def test_custom_database(
+    config: DBOSConfig, db_engine: sa.Engine, cleanup_test_databases: None
+) -> None:
+    DBOS.destroy(destroy_registry=True)
+    assert config["system_database_url"]
+    custom_database = "F8nny_dAtaB@s3@-n@m3.sqlite"
+    url = sa.make_url(config["system_database_url"])
+    url = url.set(database=custom_database)
+    config["system_database_url"] = url.render_as_string(hide_password=False)
+    # Destroy the database if it exists
+    if using_sqlite():
+        parsed_url = sa.make_url(config["system_database_url"])
+        db_path = parsed_url.database
+        assert db_path is not None
+        if os.path.exists(db_path):
+            os.remove(db_path)
+    else:
+        with db_engine.connect() as connection:
+            connection.execution_options(isolation_level="AUTOCOMMIT")
+            connection.execute(
+                sa.text(f'DROP DATABASE IF EXISTS "{custom_database}" WITH (FORCE)')
+            )
+    DBOS(config=config)
+    DBOS.launch()
+
+    key = "key"
+    val = "val"
+
+    @DBOS.transaction()
+    def transaction() -> None:
+        return
+
+    @DBOS.workflow()
+    def recv_workflow() -> Any:
+        transaction()
+        DBOS.set_event(key, val)
+        return DBOS.recv()
+
+    handle = DBOS.start_workflow(recv_workflow)
+    assert DBOS.get_event(handle.workflow_id, key) == val
+    DBOS.send(handle.workflow_id, val)
+    assert handle.get_result() == val
+    assert len(DBOS.list_workflows()) == 2
+    steps = DBOS.list_workflow_steps(handle.workflow_id)
+    assert len(steps) == 4
+    assert "transaction" in steps[0]["function_name"]
+    DBOS.destroy(destroy_registry=True)
+
+    # Test custom database with client
+    client = DBOSClient(
+        system_database_url=config["system_database_url"],
+        application_database_url=config["application_database_url"],
+    )
+    assert len(client.list_workflows()) == 2
+    steps = client.list_workflow_steps(handle.workflow_id)
+    assert len(steps) == 4
+    assert "transaction" in steps[0]["function_name"]
+
+
 def test_custom_schema(
    config: DBOSConfig, cleanup_test_databases: None, skip_with_sqlite: None
 ) -> None:
{dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_failures.py

@@ -1,7 +1,7 @@
 import threading
 import time
 import uuid
-from typing import cast
+from typing import Any, Generator, cast
 
 import pytest
 import sqlalchemy as sa
@@ -553,3 +553,16 @@ def test_unregistered_workflow(dbos: DBOS, config: DBOSConfig) -> None:
 
    with pytest.raises(DBOSWorkflowFunctionNotFoundError):
        DBOS._recover_pending_workflows()
+
+
+def test_nonserializable_return(dbos: DBOS) -> None:
+    @DBOS.step()
+    def step() -> Generator[str, Any, None]:
+        yield "val"
+
+    @DBOS.workflow()
+    def workflow() -> None:
+        step()
+
+    with pytest.raises(TypeError):
+        workflow()
{dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_kafka.py

@@ -12,24 +12,25 @@ from dbos import DBOS, KafkaMessage
 # Without it, they're automatically skipped.
 # Here's a docker-compose script you can use to set up local Kafka:
 
-# version: "3.7"
 # services:
 #   broker:
-#     image:
+#     image: apache/kafka:latest
 #     hostname: broker
 #     container_name: broker
 #     ports:
 #       - '9092:9092'
 #     environment:
-#
-#
-#
-#
-#
-#
-#
-#
-
+#       KAFKA_NODE_ID: 1
+#       KAFKA_LISTENERS: PLAINTEXT://0.0.0.0:9092,CONTROLLER://0.0.0.0:9093
+#       KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://127.0.0.1:9092
+#       KAFKA_PROCESS_ROLES: broker,controller
+#       KAFKA_CONTROLLER_QUORUM_VOTERS: 1@localhost:9093
+#       KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
+#       KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONTROLLER:PLAINTEXT
+#       KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+#       KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+#       KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+#       CLUSTER_ID: MkU3OEVBNTcwNTJENDM2Qk
 
 NUM_EVENTS = 3
 
@@ -81,12 +82,44 @@ def test_kafka(dbos: DBOS) -> None:
        assert b"test message key" in msg.key  # type: ignore
        assert b"test message value" in msg.value  # type: ignore
        print(msg)
-        if kafka_count ==
+        if kafka_count == NUM_EVENTS:
+            event.set()
+
+    wait = event.wait(timeout=10)
+    assert wait
+    assert kafka_count == NUM_EVENTS
+
+
+def test_kafka_async(dbos: DBOS) -> None:
+    event = threading.Event()
+    kafka_count = 0
+    server = "localhost:9092"
+    topic = f"dbos-kafka-{random.randrange(1_000_000_000)}"
+
+    if not send_test_messages(server, topic):
+        pytest.skip("Kafka not available")
+
+    @DBOS.kafka_consumer(
+        {
+            "bootstrap.servers": server,
+            "group.id": "dbos-test",
+            "auto.offset.reset": "earliest",
+        },
+        [topic],
+    )
+    @DBOS.workflow()
+    async def test_kafka_workflow(msg: KafkaMessage) -> None:
+        nonlocal kafka_count
+        kafka_count += 1
+        assert b"test message key" in msg.key  # type: ignore
+        assert b"test message value" in msg.value  # type: ignore
+        print(msg)
+        if kafka_count == NUM_EVENTS:
            event.set()
 
    wait = event.wait(timeout=10)
    assert wait
-    assert kafka_count ==
+    assert kafka_count == NUM_EVENTS
 
 
 def test_kafka_in_order(dbos: DBOS) -> None:
@@ -114,12 +147,12 @@ def test_kafka_in_order(dbos: DBOS) -> None:
        kafka_count += 1
        assert f"test message key {kafka_count - 1}".encode() == msg.key
        print(msg)
-        if kafka_count ==
+        if kafka_count == NUM_EVENTS:
            event.set()
 
    wait = event.wait(timeout=15)
    assert wait
-    assert kafka_count ==
+    assert kafka_count == NUM_EVENTS
    time.sleep(2)  # Wait for things to clean up
 
 
@@ -150,9 +183,9 @@ def test_kafka_no_groupid(dbos: DBOS) -> None:
        assert b"test message key" in msg.key  # type: ignore
        assert b"test message value" in msg.value  # type: ignore
        print(msg)
-        if kafka_count ==
+        if kafka_count == NUM_EVENTS * 2:
            event.set()
 
    wait = event.wait(timeout=10)
    assert wait
-    assert kafka_count ==
+    assert kafka_count == NUM_EVENTS * 2
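These tests seed the topic through a `send_test_messages` helper and skip when no broker answers; the keys and values match the assertions above. A minimal equivalent producer using confluent_kafka directly (server and message count taken from the test module):

    import random

    from confluent_kafka import Producer

    server = "localhost:9092"
    topic = f"dbos-kafka-{random.randrange(1_000_000_000)}"

    producer = Producer({"bootstrap.servers": server})
    for i in range(3):  # NUM_EVENTS = 3 in the test module
        producer.produce(
            topic,
            key=f"test message key {i}",
            value=f"test message value {i}",
        )
    producer.flush(10)  # wait up to 10s for delivery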
{dbos-2.3.0a1 → dbos-2.3.0a3}/tests/test_scheduler.py

@@ -105,6 +105,19 @@ def test_scheduled_workflow(dbos: DBOS) -> None:
    assert wf_counter > 1 and wf_counter <= 5
 
 
+def test_async_scheduled_workflow(dbos: DBOS) -> None:
+    wf_counter: int = 0
+
+    @DBOS.scheduled("* * * * * *")
+    @DBOS.workflow()
+    async def test_workflow(scheduled: datetime, actual: datetime) -> None:
+        nonlocal wf_counter
+        wf_counter += 1
+
+    time.sleep(5)
+    assert wf_counter > 1 and wf_counter <= 5
+
+
 def test_appdb_downtime(dbos: DBOS, skip_with_sqlite: None) -> None:
    wf_counter: int = 0
 
All remaining files (those listed above with +0 -0) were renamed from dbos-2.3.0a1 to dbos-2.3.0a3 without content changes.