dbos 1.15.0a4.tar.gz → 1.15.0a6.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dbos-1.15.0a4 → dbos-1.15.0a6}/PKG-INFO +1 -1
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_client.py +4 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_dbos.py +5 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_dbos_config.py +7 -14
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_debouncer.py +7 -5
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_queue.py +2 -2
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_sys_db.py +21 -2
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_sys_db_postgres.py +37 -51
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_sys_db_sqlite.py +0 -2
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/cli/migration.py +1 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/pyproject.toml +1 -1
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/conftest.py +1 -1
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_admin_server.py +4 -4
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_client.py +6 -8
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_dbos.py +69 -6
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_debouncer.py +1 -1
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_failures.py +3 -5
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_queue.py +4 -1
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_schema_migration.py +2 -1
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_spans.py +7 -7
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_workflow_introspection.py +5 -5
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_workflow_management.py +2 -5
- {dbos-1.15.0a4 → dbos-1.15.0a6}/LICENSE +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/README.md +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/__init__.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/__main__.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_admin_server.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_app_db.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_classproperty.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_conductor/conductor.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_conductor/protocol.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_context.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_core.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_croniter.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_debug.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_docker_pg_helper.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_error.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_event_loop.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_fastapi.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_flask.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_kafka.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_kafka_message.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_logger.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_migration.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_outcome.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_recovery.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_registrations.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_roles.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_scheduler.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_schemas/__init__.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_schemas/application_database.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_schemas/system_database.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_serialization.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_tracer.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_utils.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_workflow_commands.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/cli/_github_init.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/cli/_template_init.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/cli/cli.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/dbos-config.schema.json +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/py.typed +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/__init__.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/atexit_no_ctor.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/atexit_no_launch.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/classdefs.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/client_collateral.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/client_worker.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/dupname_classdefs1.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/dupname_classdefsa.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/more_classdefs.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/queuedworkflow.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/script_without_fastapi.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_async.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_async_workflow_management.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_classdecorators.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_cli.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_concurrency.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_config.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_croniter.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_debug.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_docker_secrets.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_fastapi.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_fastapi_roles.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_flask.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_kafka.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_outcome.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_package.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_scheduler.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_singleton.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_sqlalchemy.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_streaming.py +0 -0
- {dbos-1.15.0a4 → dbos-1.15.0a6}/version/__init__.py +0 -0
{dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_client.py

```diff
@@ -14,6 +14,8 @@ from typing import (
     Union,
 )
 
+import sqlalchemy as sa
+
 from dbos import _serialization
 from dbos._app_db import ApplicationDatabase
 from dbos._context import MaxPriority, MinPriority
@@ -122,6 +124,7 @@ class DBOSClient:
         database_url: Optional[str] = None,  # DEPRECATED
         *,
         system_database_url: Optional[str] = None,
+        system_database_engine: Optional[sa.Engine] = None,
         application_database_url: Optional[str] = None,
         dbos_system_schema: Optional[str] = "dbos",
     ):
@@ -145,6 +148,7 @@ class DBOSClient:
                 "max_overflow": 0,
                 "pool_size": 2,
             },
+            engine=system_database_engine,
             schema=dbos_system_schema,
         )
         self._sys_db.check_connection()
```
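The `_client.py` changes let callers hand `DBOSClient` a pre-built SQLAlchemy engine instead of having the client construct one from the URL. A minimal usage sketch (the URL and pool settings here are illustrative, not taken from the diff):

```python
import sqlalchemy as sa

from dbos import DBOSClient

url = "postgresql+psycopg://postgres:dbos@localhost:5432/app_dbos_sys"

# Build and tune the engine yourself, e.g. to share one pool across components.
engine = sa.create_engine(url, pool_size=2, max_overflow=0)

# The client reuses the provided engine rather than creating its own.
client = DBOSClient(system_database_url=url, system_database_engine=engine)
print(client.list_workflows())
```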
{dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_dbos.py

```diff
@@ -449,6 +449,7 @@ class DBOS:
         self._sys_db_field = SystemDatabase.create(
             system_database_url=get_system_database_url(self._config),
             engine_kwargs=self._config["database"]["sys_db_engine_kwargs"],
+            engine=self._config["system_database_engine"],
             debug_mode=debug_mode,
             schema=schema,
         )
@@ -1222,6 +1223,10 @@
     async def list_workflow_steps_async(cls, workflow_id: str) -> List[StepInfo]:
         await cls._configure_asyncio_thread_pool()
         return await asyncio.to_thread(cls.list_workflow_steps, workflow_id)
+
+    @classproperty
+    def application_version(cls) -> str:
+        return GlobalParams.app_version
 
     @classproperty
     def logger(cls) -> Logger:
```
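The new `DBOS.application_version` classproperty is a thin public accessor for `GlobalParams.app_version`; the test changes below replace reads of the internal global with it. A short sketch of how it behaves, based on the assertions in `test_app_version`:

```python
from dbos import DBOS, DBOSConfig

config: DBOSConfig = {"name": "app", "system_database_url": "sqlite:///app.sqlite"}
DBOS(config=config)
DBOS.launch()

# After launch: a hex hash of the registered workflow source, or the
# DBOS__APPVERSION environment override if that was set.
print(DBOS.application_version)

# After destroy, the version resets to the empty string.
DBOS.destroy(destroy_registry=True)
assert DBOS.application_version == ""
```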
{dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_dbos_config.py

```diff
@@ -3,6 +3,7 @@ import re
 from importlib import resources
 from typing import Any, Dict, List, Optional, TypedDict, cast
 
+import sqlalchemy as sa
 import yaml
 from sqlalchemy import make_url
 
@@ -34,6 +35,7 @@ class DBOSConfig(TypedDict, total=False):
         executor_id (str): Executor ID, used to identify the application instance in distributed environments
         dbos_system_schema (str): Schema name for DBOS system tables. Defaults to "dbos".
         enable_otlp (bool): If True, enable built-in DBOS OTLP tracing and logging.
+        system_database_engine (sa.Engine): A custom system database engine. If provided, DBOS will not create an engine but use this instead.
     """
 
     name: str
@@ -52,6 +54,7 @@ class DBOSConfig(TypedDict, total=False):
     executor_id: Optional[str]
     dbos_system_schema: Optional[str]
    enable_otlp: Optional[bool]
+    system_database_engine: Optional[sa.Engine]
 
 
 class RuntimeConfig(TypedDict, total=False):
@@ -97,20 +100,7 @@ class TelemetryConfig(TypedDict, total=False):
 class ConfigFile(TypedDict, total=False):
     """
     Data structure containing the DBOS Configuration.
-
-    This configuration data is typically loaded from `dbos-config.yaml`.
-    See `https://docs.dbos.dev/python/reference/configuration#dbos-configuration-file`
-
-    Attributes:
-        name (str): Application name
-        runtimeConfig (RuntimeConfig): Configuration for DBOS Cloud
-        database (DatabaseConfig): Configure pool sizes, migrate commands
-        database_url (str): Application database URL
-        system_database_url (str): System database URL
-        telemetry (TelemetryConfig): Configuration for tracing / logging
-        env (Dict[str,str]): Environment variables
-        dbos_system_schema (str): Schema name for DBOS system tables. Defaults to "dbos".
-
+    The DBOSConfig object is parsed into this.
     """
 
     name: str
@@ -120,6 +110,7 @@ class ConfigFile(TypedDict, total=False):
     system_database_url: Optional[str]
     telemetry: Optional[TelemetryConfig]
     env: Dict[str, str]
+    system_database_engine: Optional[sa.Engine]
     dbos_system_schema: Optional[str]
 
 
@@ -188,6 +179,8 @@ def translate_dbos_config_to_config_file(config: DBOSConfig) -> ConfigFile:
     if telemetry:
         translated_config["telemetry"] = telemetry
 
+    translated_config["system_database_engine"] = config.get("system_database_engine")
+
     return translated_config
 
 
```
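Wired through the config layer, a custom engine can now be supplied at application level as well. A hedged sketch, assuming the system database behind the engine already exists (with a custom engine DBOS validates connectivity but, as the `_sys_db_postgres.py` changes below show, does not create the database):

```python
import sqlalchemy as sa

from dbos import DBOS, DBOSConfig

url = "postgresql+psycopg://postgres:dbos@localhost:5432/app_dbos_sys"
engine = sa.create_engine(url)

config: DBOSConfig = {
    "name": "app",
    "system_database_url": url,
    # DBOS will use this engine instead of creating one from the URL.
    "system_database_engine": engine,
}
DBOS(config=config)
DBOS.launch()
```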
{dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_debouncer.py

```diff
@@ -89,11 +89,13 @@ def debouncer_workflow(
     # Every time the debounced workflow is called, a message is sent to this workflow.
     # It waits until debounce_period_sec have passed since the last message or until
     # debounce_timeout_sec has elapsed.
-
-
-
-
-
+    def get_debounce_deadline_epoch_sec() -> float:
+        return (
+            time.time() + options["debounce_timeout_sec"]
+            if options["debounce_timeout_sec"]
+            else math.inf
+        )
+    debounce_deadline_epoch_sec = dbos._sys_db.call_function_as_step(get_debounce_deadline_epoch_sec, "get_debounce_deadline_epoch_sec")
     debounce_period_sec = initial_debounce_period_sec
     while time.time() < debounce_deadline_epoch_sec:
         time_until_deadline = max(debounce_deadline_epoch_sec - time.time(), 0)
```
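The debouncer now computes its deadline inside `call_function_as_step`, so the `time.time()` read is checkpointed: a recovered workflow replays the recorded deadline instead of re-reading the clock and silently extending the debounce window. The public-API equivalent of the pattern, as a sketch (names are illustrative):

```python
import math
import time
from typing import Optional

from dbos import DBOS


@DBOS.step()
def get_deadline(timeout_sec: Optional[float]) -> float:
    # Recorded in the system database on first execution; replayed on recovery.
    return time.time() + timeout_sec if timeout_sec else math.inf


@DBOS.workflow()
def debounced(timeout_sec: Optional[float]) -> None:
    deadline = get_deadline(timeout_sec)
    while time.time() < deadline:
        ...  # wait for further messages, as debouncer_workflow does above
```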
{dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_queue.py

```diff
@@ -61,7 +61,7 @@ class Queue:
 
         registry = _get_or_create_dbos_registry()
         if self.name in registry.queue_info_map and self.name != INTERNAL_QUEUE_NAME:
-
+            raise Exception(f"Queue {name} has already been declared")
         registry.queue_info_map[self.name] = self
 
     def enqueue(
@@ -75,7 +75,7 @@ class Queue:
             and context.priority is not None
             and not self.priority_enabled
         ):
-
+            raise Exception(
                 f"Priority is not enabled for queue {self.name}. Setting priority will not have any effect."
             )
 
```
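Both `_queue.py` changes turn what the removed (elided) lines presumably handled more leniently into hard errors: redeclaring a queue name, or setting a priority on a queue without `priority_enabled`, now raises. For example:

```python
from dbos import Queue

queue = Queue("emails")

try:
    Queue("emails")  # second declaration of the same name
except Exception as e:
    print(e)  # Queue emails has already been declared
```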
{dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_sys_db.py

```diff
@@ -346,17 +346,32 @@ class SystemDatabase(ABC):
         *,
         system_database_url: str,
         engine_kwargs: Dict[str, Any],
+        engine: Optional[sa.Engine],
+        schema: Optional[str],
         debug_mode: bool = False,
     ):
         import sqlalchemy.dialects.postgresql as pg
         import sqlalchemy.dialects.sqlite as sq
 
         self.dialect = sq if system_database_url.startswith("sqlite") else pg
-
+
+        if system_database_url.startswith("sqlite"):
+            self.schema = None
+        else:
+            self.schema = schema if schema else "dbos"
+            SystemSchema.set_schema(self.schema)
+
+        if engine:
+            self.engine = engine
+            self.created_engine = False
+        else:
+            self.engine = self._create_engine(system_database_url, engine_kwargs)
+            self.created_engine = True
         self._engine_kwargs = engine_kwargs
 
         self.notifications_map = ThreadSafeConditionDict()
         self.workflow_events_map = ThreadSafeConditionDict()
+        self._listener_thread_lock = threading.Lock()
 
         # Now we can run background processes
         self._run_background_processes = True
@@ -1443,6 +1458,7 @@ class SystemDatabase(ABC):
     def create(
         system_database_url: str,
         engine_kwargs: Dict[str, Any],
+        engine: Optional[sa.Engine],
         schema: Optional[str],
         debug_mode: bool = False,
     ) -> "SystemDatabase":
@@ -1453,6 +1469,8 @@ class SystemDatabase(ABC):
         return SQLiteSystemDatabase(
             system_database_url=system_database_url,
             engine_kwargs=engine_kwargs,
+            engine=engine,
+            schema=schema,
             debug_mode=debug_mode,
         )
     else:
@@ -1461,8 +1479,9 @@ class SystemDatabase(ABC):
         return PostgresSystemDatabase(
             system_database_url=system_database_url,
             engine_kwargs=engine_kwargs,
-
+            engine=engine,
             schema=schema,
+            debug_mode=debug_mode,
         )
 
     @db_retry()
```
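The base constructor now records whether it owns its engine via `created_engine`; the Postgres subclass below uses the flag to decide whether it may auto-create the system database. A condensed sketch of the ownership pattern, not the actual DBOS class (the `destroy` method is a plausible use of the flag; the diff only shows it gating database creation):

```python
from typing import Any, Dict, Optional

import sqlalchemy as sa


class Database:
    def __init__(
        self, url: str, engine_kwargs: Dict[str, Any], engine: Optional[sa.Engine]
    ) -> None:
        if engine:
            # Borrowed engine: the caller owns its lifecycle, and the database
            # it points at must already exist.
            self.engine = engine
            self.created_engine = False
        else:
            # Owned engine: we may create the database and should dispose of
            # the engine ourselves when shutting down.
            self.engine = sa.create_engine(url, **engine_kwargs)
            self.created_engine = True

    def destroy(self) -> None:
        if self.created_engine:
            self.engine.dispose()
```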
{dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_sys_db_postgres.py

```diff
@@ -1,12 +1,11 @@
 import time
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Optional, cast
 
 import psycopg
 import sqlalchemy as sa
 from sqlalchemy.exc import DBAPIError
 
 from dbos._migration import ensure_dbos_schema, run_dbos_migrations
-from dbos._schemas.system_database import SystemSchema
 
 from ._logger import dbos_logger
 from ._sys_db import SystemDatabase
@@ -15,25 +14,7 @@ from ._sys_db import SystemDatabase
 class PostgresSystemDatabase(SystemDatabase):
     """PostgreSQL-specific implementation of SystemDatabase."""
 
-    def __init__(
-        self,
-        *,
-        system_database_url: str,
-        engine_kwargs: Dict[str, Any],
-        schema: Optional[str],
-        debug_mode: bool = False,
-    ):
-        super().__init__(
-            system_database_url=system_database_url,
-            engine_kwargs=engine_kwargs,
-            debug_mode=debug_mode,
-        )
-        if schema is None:
-            self.schema = "dbos"
-        else:
-            self.schema = schema
-        SystemSchema.set_schema(self.schema)
-        self.notification_conn: Optional[psycopg.connection.Connection] = None
+    notification_conn: Optional[sa.PoolProxiedConnection] = None
 
     def _create_engine(
         self, system_database_url: str, engine_kwargs: Dict[str, Any]
@@ -48,27 +29,35 @@ class PostgresSystemDatabase(SystemDatabase):
             return
         system_db_url = self.engine.url
         sysdb_name = system_db_url.database
-        #
-
-
-
-
-
-
-
-
-
-
-
-
+        # Unless we were provided an engine, if the system database does not already exist, create it
+        if self.created_engine:
+            engine = sa.create_engine(
+                system_db_url.set(database="postgres"), **self._engine_kwargs
+            )
+            with engine.connect() as conn:
+                conn.execution_options(isolation_level="AUTOCOMMIT")
+                if not conn.execute(
+                    sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
+                    parameters={"db_name": sysdb_name},
+                ).scalar():
+                    dbos_logger.info(f"Creating system database {sysdb_name}")
+                    conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
+            engine.dispose()
+        else:
+            # If we were provided an engine, validate it can connect
+            with self.engine.connect() as conn:
+                conn.execute(sa.text("SELECT 1"))
 
+        assert self.schema
         ensure_dbos_schema(self.engine, self.schema)
         run_dbos_migrations(self.engine, self.schema)
 
     def _cleanup_connections(self) -> None:
         """Clean up PostgreSQL-specific connections."""
-
-            self.notification_conn.
+        with self._listener_thread_lock:
+            if self.notification_conn and self.notification_conn.dbapi_connection:
+                self.notification_conn.dbapi_connection.close()
+                self.notification_conn.invalidate()
 
     def _is_unique_constraint_violation(self, dbapi_error: DBAPIError) -> bool:
         """Check if the error is a unique constraint violation in PostgreSQL."""
@@ -111,20 +100,18 @@ class PostgresSystemDatabase(SystemDatabase):
         """Listen for PostgreSQL notifications using psycopg."""
         while self._run_background_processes:
             try:
-
-
-
-
-
-
-
-
-
-
-                self.notification_conn.execute("LISTEN dbos_workflow_events_channel")
-
+                with self._listener_thread_lock:
+                    self.notification_conn = self.engine.raw_connection()
+                    self.notification_conn.detach()
+                    psycopg_conn = cast(
+                        psycopg.connection.Connection, self.notification_conn
+                    )
+                    psycopg_conn.set_autocommit(True)
+
+                psycopg_conn.execute("LISTEN dbos_notifications_channel")
+                psycopg_conn.execute("LISTEN dbos_workflow_events_channel")
                 while self._run_background_processes:
-                    gen =
+                    gen = psycopg_conn.notifies()
                     for notify in gen:
                         channel = notify.channel
                         dbos_logger.debug(
@@ -162,5 +149,4 @@ class PostgresSystemDatabase(SystemDatabase):
                 time.sleep(1)
                 # Then the loop will try to reconnect and restart the listener
             finally:
-
-                self.notification_conn.close()
+                self._cleanup_connections()
```
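The rewritten listener borrows a pooled DBAPI connection through `Engine.raw_connection()`, detaches it so the pool will not recycle it mid-`LISTEN`, and drives it as a psycopg 3 connection. A standalone sketch of that technique (channel name and URL are placeholders):

```python
from typing import cast

import psycopg
import sqlalchemy as sa

engine = sa.create_engine("postgresql+psycopg://postgres:dbos@localhost:5432/postgres")

# Borrow a connection from the pool and detach it for long-lived use.
proxied = engine.raw_connection()
proxied.detach()
conn = cast(psycopg.Connection, proxied.dbapi_connection)
conn.autocommit = True  # LISTEN/NOTIFY requires no open transaction

conn.execute("LISTEN my_channel")
for notify in conn.notifies():  # blocks, yielding notifications as they arrive
    print(notify.channel, notify.payload)
```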
{dbos-1.15.0a4 → dbos-1.15.0a6}/dbos/_sys_db_sqlite.py

```diff
@@ -6,7 +6,6 @@ import sqlalchemy as sa
 from sqlalchemy.exc import DBAPIError
 
 from dbos._migration import sqlite_migrations
-from dbos._schemas.system_database import SystemSchema
 
 from ._logger import dbos_logger
 from ._sys_db import SystemDatabase
@@ -19,7 +18,6 @@ class SQLiteSystemDatabase(SystemDatabase):
         self, system_database_url: str, engine_kwargs: Dict[str, Any]
     ) -> sa.Engine:
         """Create a SQLite engine."""
-        SystemSchema.set_schema(None)
         return sa.create_engine(system_database_url)
 
     def run_migrations(self) -> None:
```
{dbos-1.15.0a4 → dbos-1.15.0a6}/tests/conftest.py

```diff
@@ -52,7 +52,7 @@ def default_config() -> DBOSConfig:
         "system_database_url": (
             "sqlite:///test.sqlite"
             if using_sqlite()
-            else f"postgresql://postgres:{quote(os.environ.get('PGPASSWORD', 'dbos'), safe='')}@localhost:5432/dbostestpy_dbos_sys"
+            else f"postgresql+psycopg://postgres:{quote(os.environ.get('PGPASSWORD', 'dbos'), safe='')}@localhost:5432/dbostestpy_dbos_sys"
         ),
         "enable_otlp": True,
     }
```
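The conftest change pins the driver in the URL: with SQLAlchemy, a bare `postgresql://` scheme selects the default DBAPI (psycopg2), while `postgresql+psycopg://` explicitly selects psycopg 3, which the new notification listener depends on. For instance:

```python
from sqlalchemy import make_url

url = make_url("postgresql+psycopg://postgres:dbos@localhost:5432/dbos_sys")
print(url.drivername)         # postgresql+psycopg
print(url.get_driver_name())  # psycopg
```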
{dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_admin_server.py

```diff
@@ -520,7 +520,7 @@ def test_list_workflows(dbos: DBOS, skip_with_sqlite_imprecise_time: None) -> None:
     assert workflows[0]["CreatedAt"] is not None and len(workflows[0]["CreatedAt"]) > 0
     assert workflows[0]["UpdatedAt"] is not None and len(workflows[0]["UpdatedAt"]) > 0
     assert workflows[0]["QueueName"] is None
-    assert workflows[0]["ApplicationVersion"] ==
+    assert workflows[0]["ApplicationVersion"] == DBOS.application_version
     assert workflows[0]["ExecutorID"] == GlobalParams.executor_id
 
     # Only load input and output as requested
@@ -620,7 +620,7 @@ def test_list_workflows(dbos: DBOS, skip_with_sqlite_imprecise_time: None) -> None:
     assert len(workflows) == 2
 
     filters = {
-        "application_version":
+        "application_version": DBOS.application_version,
     }
     response = requests.post("http://localhost:3001/workflows", json=filters, timeout=5)
     assert response.status_code == 200
@@ -699,7 +699,7 @@ def test_get_workflow_by_id(dbos: DBOS) -> None:
         workflow_data["UpdatedAt"] is not None and len(workflow_data["UpdatedAt"]) > 0
     )
     assert workflow_data["QueueName"] is None
-    assert workflow_data["ApplicationVersion"] ==
+    assert workflow_data["ApplicationVersion"] == DBOS.application_version
     assert workflow_data["ExecutorID"] == GlobalParams.executor_id
 
     # Test GET /workflows/:workflow_id for a non-existing workflow
@@ -812,7 +812,7 @@ def test_queued_workflows_endpoint(
         and len(queued_workflows[0]["UpdatedAt"]) > 0
     )
     assert queued_workflows[0]["QueueName"] == test_queue1.name
-    assert queued_workflows[0]["ApplicationVersion"] ==
+    assert queued_workflows[0]["ApplicationVersion"] == DBOS.application_version
     assert queued_workflows[0]["ExecutorID"] == GlobalParams.executor_id
 
     # Verify sort_desc inverts the order
```
{dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_client.py

```diff
@@ -14,8 +14,6 @@ from sqlalchemy.exc import DBAPIError
 
 from dbos import DBOS, DBOSClient, DBOSConfig, EnqueueOptions, SetWorkflowID
 from dbos._dbos import WorkflowHandle, WorkflowHandleAsync
-from dbos._sys_db import SystemDatabase
-from dbos._utils import GlobalParams
 from tests import client_collateral
 from tests.client_collateral import event_test, retrieve_test, send_test
 
@@ -125,7 +123,7 @@ def test_client_enqueue_appver_not_set(dbos: DBOS, client: DBOSClient) -> None:
     assert wf_status is not None
     assert wf_status.status == "SUCCESS"
     assert wf_status.name == "enqueue_test"
-    assert wf_status.app_version ==
+    assert wf_status.app_version == DBOS.application_version
 
 
 def test_client_enqueue_appver_set(dbos: DBOS, client: DBOSClient) -> None:
@@ -138,7 +136,7 @@ def test_client_enqueue_appver_set(dbos: DBOS, client: DBOSClient) -> None:
         "queue_name": "test_queue",
         "workflow_name": "enqueue_test",
         "workflow_id": wfid,
-        "app_version":
+        "app_version": DBOS.application_version,
     }
 
     client.enqueue(options, 42, "test", johnDoe)
@@ -151,7 +149,7 @@ def test_client_enqueue_appver_set(dbos: DBOS, client: DBOSClient) -> None:
     assert wf_status is not None
     assert wf_status.status == "SUCCESS"
     assert wf_status.name == "enqueue_test"
-    assert wf_status.app_version ==
+    assert wf_status.app_version == DBOS.application_version
 
 
 def test_client_enqueue_wrong_appver(dbos: DBOS, client: DBOSClient) -> None:
@@ -205,7 +203,7 @@ def test_client_enqueue_idempotent(config: DBOSConfig, client: DBOSClient) -> None:
     assert wf_status is not None
     assert wf_status.status == "SUCCESS"
     assert wf_status.name == "enqueue_test"
-    assert wf_status.app_version ==
+    assert wf_status.app_version == DBOS.application_version
 
     DBOS.destroy(destroy_registry=True)
 
@@ -272,7 +270,7 @@ def test_client_send_idempotent(
     idempotency_key = f"test-idempotency-{now}"
     sendWFID = f"{wfid}-{idempotency_key}"
 
-    run_send_worker(wfid, topic,
+    run_send_worker(wfid, topic, DBOS.application_version)
 
     client.send(wfid, message, topic, idempotency_key)
     client.send(wfid, message, topic, idempotency_key)
@@ -315,7 +313,7 @@ def test_client_send_failure(
     idempotency_key = f"test-idempotency-{now}"
     sendWFID = f"{wfid}-{idempotency_key}"
 
-    run_send_worker(wfid, topic,
+    run_send_worker(wfid, topic, DBOS.application_version)
 
     client.send(wfid, message, topic, idempotency_key)
 
```
{dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_dbos.py

```diff
@@ -11,6 +11,7 @@ from typing import Any, Optional
 
 import pytest
 import sqlalchemy as sa
+from sqlalchemy.exc import OperationalError
 
 # Public API
 from dbos import (
@@ -1371,12 +1372,12 @@ def test_app_version(config: DBOSConfig) -> None:
     DBOS.launch()
 
     # Verify that app version is correctly set to a hex string
-    app_version =
+    app_version = DBOS.application_version
     assert len(app_version) > 0
     assert is_hex(app_version)
 
     DBOS.destroy(destroy_registry=True)
-    assert
+    assert DBOS.application_version == ""
     dbos = DBOS(config=config)
 
     @DBOS.workflow()
@@ -1390,7 +1391,7 @@ def test_app_version(config: DBOSConfig) -> None:
     DBOS.launch()
 
     # Verify stability--the same workflow source produces the same app version.
-    assert
+    assert DBOS.application_version == app_version
 
     DBOS.destroy(destroy_registry=True)
     dbos = DBOS(config=config)
@@ -1401,7 +1402,7 @@ def test_app_version(config: DBOSConfig) -> None:
 
     # Verify that changing the workflow source changes the workflow version
     DBOS.launch()
-    assert
+    assert DBOS.application_version != app_version
 
     # Verify that version can be overriden with an environment variable
     app_version = str(uuid.uuid4())
@@ -1415,7 +1416,7 @@ def test_app_version(config: DBOSConfig) -> None:
         return x
 
     DBOS.launch()
-    assert
+    assert DBOS.application_version == app_version
 
     del os.environ["DBOS__APPVERSION"]
 
@@ -1434,7 +1435,7 @@ def test_app_version(config: DBOSConfig) -> None:
         return DBOS.workflow_id
 
     DBOS.launch()
-    assert
+    assert DBOS.application_version == app_version
     assert GlobalParams.executor_id == executor_id
     wfid = test_workflow()
     handle: WorkflowHandle[str] = DBOS.retrieve_workflow(wfid)
@@ -1827,3 +1828,65 @@ def test_custom_schema(
     steps = client.list_workflow_steps(handle.workflow_id)
     assert len(steps) == 4
     assert "transaction" in steps[0]["function_name"]
+
+
+def test_custom_engine(
+    config: DBOSConfig,
+    cleanup_test_databases: None,
+    db_engine: sa.Engine,
+    skip_with_sqlite: None,
+) -> None:
+    DBOS.destroy(destroy_registry=True)
+    assert config["system_database_url"]
+    config["application_database_url"] = None
+    system_database_url = config["system_database_url"]
+
+    # Create a custom engine
+    engine = sa.create_engine(system_database_url)
+    config["system_database_engine"] = engine
+
+    # Launch DBOS with the engine. It should fail because the database does not exist.
+    dbos = DBOS(config=config)
+    with pytest.raises(OperationalError):
+        DBOS.launch()
+    DBOS.destroy(destroy_registry=True)
+
+    # Create the database
+    with db_engine.connect() as c:
+        c.execution_options(isolation_level="AUTOCOMMIT")
+        sysdb_name = sa.make_url(config["system_database_url"]).database
+        c.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
+
+    # Launch DBOS again using the custom pool. It should succeed despite the bogus URL.
+    config["system_database_url"] = "postgresql://bogus:url@not:42/fake"
+    dbos = DBOS(config=config)
+    DBOS.launch()
+
+    key = "key"
+    val = "val"
+
+    @DBOS.workflow()
+    def recv_workflow() -> Any:
+        DBOS.set_event(key, val)
+        return DBOS.recv()
+
+    assert dbos._sys_db.engine == engine
+    handle = DBOS.start_workflow(recv_workflow)
+    assert DBOS.get_event(handle.workflow_id, key) == val
+    DBOS.send(handle.workflow_id, val)
+    assert handle.get_result() == val
+    assert len(DBOS.list_workflows()) == 2
+    steps = DBOS.list_workflow_steps(handle.workflow_id)
+    assert len(steps) == 3
+    assert "setEvent" in steps[0]["function_name"]
+    DBOS.destroy(destroy_registry=True)
+
+    # Test custom engine with client
+    client = DBOSClient(
+        system_database_url=config["system_database_url"],
+        system_database_engine=config["system_database_engine"],
+    )
+    assert len(client.list_workflows()) == 2
+    steps = client.list_workflow_steps(handle.workflow_id)
+    assert len(steps) == 3
+    assert "setEvent" in steps[0]["function_name"]
```
{dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_debouncer.py

```diff
@@ -147,7 +147,7 @@ def test_debouncer_queue(dbos: DBOS) -> None:
        return x
 
     first_value, second_value, third_value, fourth_value = 0, 1, 2, 3
-    queue = Queue("test-queue")
+    queue = Queue("test-queue", priority_enabled=True)
 
     debouncer = Debouncer.create(workflow, queue=queue)
     debounce_period_sec = 2
```
{dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_failures.py

```diff
@@ -136,12 +136,10 @@ def test_notification_errors(dbos: DBOS, skip_with_sqlite: None) -> None:
     system_database = cast(PostgresSystemDatabase, dbos._sys_db)
     while system_database.notification_conn is None:
         time.sleep(1)
-    system_database.
-    assert system_database.notification_conn.closed == 1
+    system_database._cleanup_connections()
 
-    # Wait for the connection to
-
-    time.sleep(1)
+    # Wait for the connection to re-establish
+    time.sleep(3)
 
     dest_uuid = str("sruuid1")
     with SetWorkflowID(dest_uuid):
```
{dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_queue.py

```diff
@@ -55,6 +55,10 @@ def test_simple_queue(dbos: DBOS) -> None:
 
     queue = Queue("test_queue")
 
+    # Test that redeclaring a queue is an exception
+    with pytest.raises(Exception):
+        Queue(queue.name)
+
     with SetWorkflowID(wfid):
         handle = queue.enqueue(test_workflow, "abc", "123")
     assert handle.get_result() == "abcd123"
@@ -908,7 +912,6 @@ def test_timeout_queue(dbos: DBOS) -> None:
 
     # Verify if a parent called with a timeout enqueues a blocked child
     # then exits the deadline propagates and the child is cancelled.
-    queue = Queue("regular_queue")
 
     @DBOS.workflow()
     def exiting_parent_workflow() -> str:
```
{dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_schema_migration.py

```diff
@@ -132,8 +132,9 @@ def test_sqlite_systemdb_migration() -> None:
     sys_db = SystemDatabase.create(
         system_database_url=sqlite_url,
         engine_kwargs={},
-
+        engine=None,
         schema=None,
+        debug_mode=False,
     )
 
     # Run migrations
```
{dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_spans.py

```diff
@@ -76,7 +76,7 @@ def test_spans(config: DBOSConfig) -> None:
     for log in logs:
         assert log.log_record.attributes is not None
         assert (
-            log.log_record.attributes["applicationVersion"] ==
+            log.log_record.attributes["applicationVersion"] == DBOS.application_version
         )
         assert log.log_record.attributes["executorID"] == GlobalParams.executor_id
         assert log.log_record.attributes["foo"] == "bar"
@@ -98,7 +98,7 @@ def test_spans(config: DBOSConfig) -> None:
             # Skip the manual span because it was not created by DBOS.tracer
             continue
         assert span.attributes is not None
-        assert span.attributes["applicationVersion"] ==
+        assert span.attributes["applicationVersion"] == DBOS.application_version
         assert span.attributes["executorID"] == GlobalParams.executor_id
         assert span.context is not None
         assert span.attributes["foo"] == "bar"
@@ -203,7 +203,7 @@ async def test_spans_async(dbos: DBOS) -> None:
     for log in logs:
         assert log.log_record.attributes is not None
         assert (
-            log.log_record.attributes["applicationVersion"] ==
+            log.log_record.attributes["applicationVersion"] == DBOS.application_version
         )
         assert log.log_record.attributes["executorID"] == GlobalParams.executor_id
         # Make sure the log record has a span_id and trace_id
@@ -226,7 +226,7 @@ async def test_spans_async(dbos: DBOS) -> None:
             # Skip the manual span because it was not created by DBOS.tracer
             continue
         assert span.attributes is not None
-        assert span.attributes["applicationVersion"] ==
+        assert span.attributes["applicationVersion"] == DBOS.application_version
         assert span.attributes["executorID"] == GlobalParams.executor_id
         assert span.context is not None
         assert span.context.span_id > 0
@@ -316,7 +316,7 @@ def test_wf_fastapi(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
     assert len(logs) == 1
     assert logs[0].log_record.attributes is not None
     assert (
-        logs[0].log_record.attributes["applicationVersion"] ==
+        logs[0].log_record.attributes["applicationVersion"] == DBOS.application_version
     )
     assert logs[0].log_record.span_id is not None and logs[0].log_record.span_id > 0
     assert logs[0].log_record.trace_id is not None and logs[0].log_record.trace_id > 0
@@ -331,7 +331,7 @@ def test_wf_fastapi(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
 
     for span in spans:
         assert span.attributes is not None
-        assert span.attributes["applicationVersion"] ==
+        assert span.attributes["applicationVersion"] == DBOS.application_version
         assert span.context is not None
         assert span.context.span_id > 0
         assert span.context.trace_id > 0
@@ -389,7 +389,7 @@ def test_disable_otlp_no_spans(config: DBOSConfig) -> None:
     for log in logs:
         assert log.log_record.attributes is not None
         assert (
-            log.log_record.attributes["applicationVersion"] ==
+            log.log_record.attributes["applicationVersion"] == DBOS.application_version
         )
         assert log.log_record.attributes["executorID"] == GlobalParams.executor_id
         assert log.log_record.attributes["foo"] == "bar"
```
{dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_workflow_introspection.py

```diff
@@ -39,7 +39,7 @@ def test_list_workflow(dbos: DBOS) -> None:
     assert output.updated_at is not None and output.updated_at > 0
     assert output.queue_name == None
     assert output.executor_id == GlobalParams.executor_id
-    assert output.app_version ==
+    assert output.app_version == DBOS.application_version
     assert output.app_id == ""
     assert output.recovery_attempts == 1
     assert output.workflow_timeout_ms is None
@@ -77,7 +77,7 @@ def test_list_workflow(dbos: DBOS) -> None:
     # Test searching by application version
     outputs = DBOS.list_workflows(app_version="no")
     assert len(outputs) == 0
-    outputs = DBOS.list_workflows(app_version=
+    outputs = DBOS.list_workflows(app_version=DBOS.application_version)
     assert len(outputs) == 1
 
 
@@ -109,7 +109,7 @@ def test_list_workflow_error(dbos: DBOS) -> None:
     assert output.updated_at is not None and output.updated_at > 0
     assert output.queue_name == None
     assert output.executor_id == GlobalParams.executor_id
-    assert output.app_version ==
+    assert output.app_version == DBOS.application_version
     assert output.app_id == ""
     assert output.recovery_attempts == 1
     assert output.workflow_timeout_ms is None
@@ -289,7 +289,7 @@ def test_queued_workflows(dbos: DBOS, skip_with_sqlite_imprecise_time: None) -> None:
     assert workflow.error is None
     assert "blocking_step" in workflow.name
     assert workflow.executor_id == GlobalParams.executor_id
-    assert workflow.app_version ==
+    assert workflow.app_version == DBOS.application_version
     assert workflow.created_at is not None and workflow.created_at > 0
     assert workflow.updated_at is not None and workflow.updated_at > 0
     assert workflow.recovery_attempts == 1
@@ -323,7 +323,7 @@ def test_queued_workflows(dbos: DBOS, skip_with_sqlite_imprecise_time: None) -> None:
     assert workflow.error is None
     assert "blocking_step" in workflow.name
     assert workflow.executor_id == GlobalParams.executor_id
-    assert workflow.app_version ==
+    assert workflow.app_version == DBOS.application_version
     assert workflow.created_at is not None and workflow.created_at > 0
     assert workflow.updated_at is not None and workflow.updated_at > 0
     assert workflow.recovery_attempts == 1
```
{dbos-1.15.0a4 → dbos-1.15.0a6}/tests/test_workflow_management.py

```diff
@@ -1,14 +1,11 @@
 import threading
 import time
 import uuid
-from typing import Callable
 
 import pytest
 import sqlalchemy as sa
 
-# Public API
 from dbos import DBOS, Queue, SetWorkflowID
-from dbos._dbos import DBOSConfiguredInstance
 from dbos._error import DBOSAwaitedWorkflowCancelledError
 from dbos._schemas.application_database import ApplicationSchema
 from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams
@@ -58,7 +55,7 @@ def test_cancel_resume(dbos: DBOS) -> None:
 
     # Resume the workflow. Verify it completes successfully.
     handle = DBOS.resume_workflow(wfid)
-    assert handle.get_status().app_version ==
+    assert handle.get_status().app_version == DBOS.application_version
     assert handle.get_status().queue_name == INTERNAL_QUEUE_NAME
     assert handle.get_result() == input
     assert steps_completed == 2
@@ -231,7 +228,7 @@ def test_fork_steps(
     with SetWorkflowID(fork_id):
         forked_handle = DBOS.fork_workflow(wfid, 3)
     assert forked_handle.workflow_id == fork_id
-    assert forked_handle.get_status().app_version ==
+    assert forked_handle.get_status().app_version == DBOS.application_version
     assert forked_handle.get_result() == output
 
     assert stepOneCount == 1
```