dbos 0.22.0a11__tar.gz → 0.23.0a2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dbos-0.22.0a11 → dbos-0.23.0a2}/PKG-INFO +1 -1
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_context.py +3 -1
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_core.py +26 -1
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_dbos.py +23 -10
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_error.py +11 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_logger.py +5 -6
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_queue.py +5 -1
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_recovery.py +4 -2
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_sys_db.py +4 -6
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_tracer.py +4 -4
- dbos-0.23.0a2/dbos/_utils.py +6 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/pyproject.toml +1 -1
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_admin_server.py +5 -1
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_dbos.py +6 -4
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_spans.py +4 -3
- dbos-0.23.0a2/tests/test_workflow_cancel.py +145 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/LICENSE +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/README.md +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/__init__.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_admin_server.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_app_db.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_classproperty.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_cloudutils/authentication.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_cloudutils/cloudutils.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_cloudutils/databases.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_croniter.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_db_wizard.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_dbos_config.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_fastapi.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_flask.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_kafka.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_kafka_message.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_migrations/env.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_migrations/script.py.mako +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_outcome.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_registrations.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_request.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_roles.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_scheduler.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_schemas/__init__.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_schemas/application_database.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_schemas/system_database.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_serialization.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_templates/dbos-db-starter/__package/main.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_workflow_commands.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/cli/_github_init.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/cli/_template_init.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/cli/cli.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/dbos-config.schema.json +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/py.typed +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/__init__.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/atexit_no_ctor.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/atexit_no_launch.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/classdefs.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/conftest.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/more_classdefs.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/queuedworkflow.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_async.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_classdecorators.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_concurrency.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_config.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_croniter.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_failures.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_fastapi.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_fastapi_roles.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_flask.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_kafka.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_outcome.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_package.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_queue.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_scheduler.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_schema_migration.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_singleton.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_sqlalchemy.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/tests/test_workflow_cmds.py +0 -0
- {dbos-0.22.0a11 → dbos-0.23.0a2}/version/__init__.py +0 -0
|
@@ -12,6 +12,8 @@ from typing import List, Literal, Optional, Type, TypedDict
|
|
|
12
12
|
from opentelemetry.trace import Span, Status, StatusCode
|
|
13
13
|
from sqlalchemy.orm import Session
|
|
14
14
|
|
|
15
|
+
from dbos._utils import GlobalParams
|
|
16
|
+
|
|
15
17
|
from ._logger import dbos_logger
|
|
16
18
|
from ._request import Request
|
|
17
19
|
from ._tracer import dbos_tracer
|
|
@@ -48,7 +50,7 @@ class TracedAttributes(TypedDict, total=False):
|
|
|
48
50
|
|
|
49
51
|
class DBOSContext:
|
|
50
52
|
def __init__(self) -> None:
|
|
51
|
-
self.executor_id =
|
|
53
|
+
self.executor_id = GlobalParams.executor_id
|
|
52
54
|
self.app_id = os.environ.get("DBOS__APPID", "")
|
|
53
55
|
|
|
54
56
|
self.logger = dbos_logger
|
|
@@ -22,6 +22,7 @@ from typing import (
|
|
|
22
22
|
)
|
|
23
23
|
|
|
24
24
|
from dbos._outcome import Immediate, NoResult, Outcome, Pending
|
|
25
|
+
from dbos._utils import GlobalParams
|
|
25
26
|
|
|
26
27
|
from ._app_db import ApplicationDatabase, TransactionResultInternal
|
|
27
28
|
|
|
@@ -51,6 +52,7 @@ from ._error import (
|
|
|
51
52
|
DBOSMaxStepRetriesExceeded,
|
|
52
53
|
DBOSNonExistentWorkflowError,
|
|
53
54
|
DBOSRecoveryError,
|
|
55
|
+
DBOSWorkflowCancelledError,
|
|
54
56
|
DBOSWorkflowConflictIDError,
|
|
55
57
|
DBOSWorkflowFunctionNotFoundError,
|
|
56
58
|
)
|
|
@@ -163,7 +165,7 @@ def _init_workflow(
|
|
|
163
165
|
"output": None,
|
|
164
166
|
"error": None,
|
|
165
167
|
"app_id": ctx.app_id,
|
|
166
|
-
"app_version":
|
|
168
|
+
"app_version": GlobalParams.app_version,
|
|
167
169
|
"executor_id": ctx.executor_id,
|
|
168
170
|
"request": (
|
|
169
171
|
_serialization.serialize(ctx.request) if ctx.request is not None else None
|
|
@@ -224,6 +226,8 @@ def _get_wf_invoke_func(
|
|
|
224
226
|
)
|
|
225
227
|
output = wf_handle.get_result()
|
|
226
228
|
return output
|
|
229
|
+
except DBOSWorkflowCancelledError as error:
|
|
230
|
+
raise
|
|
227
231
|
except Exception as error:
|
|
228
232
|
status["status"] = "ERROR"
|
|
229
233
|
status["error"] = _serialization.serialize_exception(error)
|
|
@@ -539,6 +543,13 @@ def decorate_transaction(
|
|
|
539
543
|
raise DBOSException(
|
|
540
544
|
f"Function {func.__name__} invoked before DBOS initialized"
|
|
541
545
|
)
|
|
546
|
+
|
|
547
|
+
ctx = assert_current_dbos_context()
|
|
548
|
+
if dbosreg.is_workflow_cancelled(ctx.workflow_id):
|
|
549
|
+
raise DBOSWorkflowCancelledError(
|
|
550
|
+
f"Workflow {ctx.workflow_id} is cancelled. Aborting transaction {func.__name__}."
|
|
551
|
+
)
|
|
552
|
+
|
|
542
553
|
dbos = dbosreg.dbos
|
|
543
554
|
with dbos._app_db.sessionmaker() as session:
|
|
544
555
|
attributes: TracedAttributes = {
|
|
@@ -560,6 +571,12 @@ def decorate_transaction(
|
|
|
560
571
|
backoff_factor = 1.5
|
|
561
572
|
max_retry_wait_seconds = 2.0
|
|
562
573
|
while True:
|
|
574
|
+
|
|
575
|
+
if dbosreg.is_workflow_cancelled(ctx.workflow_id):
|
|
576
|
+
raise DBOSWorkflowCancelledError(
|
|
577
|
+
f"Workflow {ctx.workflow_id} is cancelled. Aborting transaction {func.__name__}."
|
|
578
|
+
)
|
|
579
|
+
|
|
563
580
|
has_recorded_error = False
|
|
564
581
|
txn_error: Optional[Exception] = None
|
|
565
582
|
try:
|
|
@@ -710,6 +727,13 @@ def decorate_step(
|
|
|
710
727
|
"operationType": OperationType.STEP.value,
|
|
711
728
|
}
|
|
712
729
|
|
|
730
|
+
# Check if the workflow is cancelled
|
|
731
|
+
ctx = assert_current_dbos_context()
|
|
732
|
+
if dbosreg.is_workflow_cancelled(ctx.workflow_id):
|
|
733
|
+
raise DBOSWorkflowCancelledError(
|
|
734
|
+
f"Workflow {ctx.workflow_id} is cancelled. Aborting step {func.__name__}."
|
|
735
|
+
)
|
|
736
|
+
|
|
713
737
|
attempts = max_attempts if retries_allowed else 1
|
|
714
738
|
max_retry_interval_seconds: float = 3600 # 1 Hour
|
|
715
739
|
|
|
@@ -800,6 +824,7 @@ def decorate_step(
|
|
|
800
824
|
ctx = get_local_dbos_context()
|
|
801
825
|
if ctx and ctx.is_step():
|
|
802
826
|
# Call the original function directly
|
|
827
|
+
|
|
803
828
|
return func(*args, **kwargs)
|
|
804
829
|
if ctx and ctx.is_within_workflow():
|
|
805
830
|
assert ctx.is_workflow(), "Steps must be called from within workflows"
|
|
@@ -32,6 +32,8 @@ from typing import (
|
|
|
32
32
|
|
|
33
33
|
from opentelemetry.trace import Span
|
|
34
34
|
|
|
35
|
+
from dbos._utils import GlobalParams
|
|
36
|
+
|
|
35
37
|
from ._classproperty import classproperty
|
|
36
38
|
from ._core import (
|
|
37
39
|
TEMP_SEND_WF_NAME,
|
|
@@ -155,6 +157,7 @@ class DBOSRegistry:
|
|
|
155
157
|
self.pollers: list[RegisteredJob] = []
|
|
156
158
|
self.dbos: Optional[DBOS] = None
|
|
157
159
|
self.config: Optional[ConfigFile] = None
|
|
160
|
+
self.workflow_cancelled_map: dict[str, bool] = {}
|
|
158
161
|
|
|
159
162
|
def register_wf_function(self, name: str, wrapped_func: F, functype: str) -> None:
|
|
160
163
|
if name in self.function_type_map:
|
|
@@ -197,6 +200,15 @@ class DBOSRegistry:
|
|
|
197
200
|
else:
|
|
198
201
|
self.instance_info_map[fn] = inst
|
|
199
202
|
|
|
203
|
+
def cancel_workflow(self, workflow_id: str) -> None:
    # Flag this workflow ID as cancelled. The flag is consulted by the
    # step/transaction decorators before they execute, so an in-flight
    # workflow aborts at its next step or transaction boundary.
    self.workflow_cancelled_map[workflow_id] = True
|
|
205
|
+
|
|
206
|
+
def is_workflow_cancelled(self, workflow_id: str) -> bool:
    # Report whether cancel_workflow() has flagged this workflow ID.
    # IDs that were never cancelled (or whose flag was cleared) are not
    # present in the map and count as not cancelled.
    if workflow_id in self.workflow_cancelled_map:
        return self.workflow_cancelled_map[workflow_id]
    return False
|
|
208
|
+
|
|
209
|
+
def clear_workflow_cancelled(self, workflow_id: str) -> None:
    # Remove the cancellation flag for this workflow ID (no-op when the
    # flag was never set). Called when a cancelled workflow is resumed
    # so its steps and transactions may run again.
    self.workflow_cancelled_map.pop(workflow_id, None)
|
|
211
|
+
|
|
200
212
|
def compute_app_version(self) -> str:
|
|
201
213
|
"""
|
|
202
214
|
An application's version is computed from a hash of the source of its workflows.
|
|
@@ -280,6 +292,8 @@ class DBOS:
|
|
|
280
292
|
if destroy_registry:
|
|
281
293
|
global _dbos_global_registry
|
|
282
294
|
_dbos_global_registry = None
|
|
295
|
+
GlobalParams.app_version = os.environ.get("DBOS__APPVERSION", "")
|
|
296
|
+
GlobalParams.executor_id = os.environ.get("DBOS__VMID", "local")
|
|
283
297
|
|
|
284
298
|
def __init__(
|
|
285
299
|
self,
|
|
@@ -309,8 +323,6 @@ class DBOS:
|
|
|
309
323
|
self.flask: Optional["Flask"] = flask
|
|
310
324
|
self._executor_field: Optional[ThreadPoolExecutor] = None
|
|
311
325
|
self._background_threads: List[threading.Thread] = []
|
|
312
|
-
self._executor_id: str = os.environ.get("DBOS__VMID", "local")
|
|
313
|
-
self.app_version: str = os.environ.get("DBOS__APPVERSION", "")
|
|
314
326
|
|
|
315
327
|
# If using FastAPI, set up middleware and lifecycle events
|
|
316
328
|
if self.fastapi is not None:
|
|
@@ -379,10 +391,9 @@ class DBOS:
|
|
|
379
391
|
dbos_logger.warning(f"DBOS was already launched")
|
|
380
392
|
return
|
|
381
393
|
self._launched = True
|
|
382
|
-
if
|
|
383
|
-
|
|
384
|
-
dbos_logger.info(f"Application version: {
|
|
385
|
-
dbos_tracer.app_version = self.app_version
|
|
394
|
+
if GlobalParams.app_version == "":
|
|
395
|
+
GlobalParams.app_version = self._registry.compute_app_version()
|
|
396
|
+
dbos_logger.info(f"Application version: {GlobalParams.app_version}")
|
|
386
397
|
self._executor_field = ThreadPoolExecutor(max_workers=64)
|
|
387
398
|
self._sys_db_field = SystemDatabase(self.config)
|
|
388
399
|
self._app_db_field = ApplicationDatabase(self.config)
|
|
@@ -392,15 +403,15 @@ class DBOS:
|
|
|
392
403
|
self._admin_server_field = AdminServer(dbos=self, port=admin_port)
|
|
393
404
|
|
|
394
405
|
workflow_ids = self._sys_db.get_pending_workflows(
|
|
395
|
-
|
|
406
|
+
GlobalParams.executor_id, GlobalParams.app_version
|
|
396
407
|
)
|
|
397
408
|
if (len(workflow_ids)) > 0:
|
|
398
409
|
self.logger.info(
|
|
399
|
-
f"Recovering {len(workflow_ids)} workflows from application version {
|
|
410
|
+
f"Recovering {len(workflow_ids)} workflows from application version {GlobalParams.app_version}"
|
|
400
411
|
)
|
|
401
412
|
else:
|
|
402
413
|
self.logger.info(
|
|
403
|
-
f"No workflows to recover from application version {
|
|
414
|
+
f"No workflows to recover from application version {GlobalParams.app_version}"
|
|
404
415
|
)
|
|
405
416
|
|
|
406
417
|
self._executor.submit(startup_recovery_thread, self, workflow_ids)
|
|
@@ -446,7 +457,7 @@ class DBOS:
|
|
|
446
457
|
# to enable their export in DBOS Cloud
|
|
447
458
|
for handler in dbos_logger.handlers:
|
|
448
459
|
handler.flush()
|
|
449
|
-
add_otlp_to_all_loggers(
|
|
460
|
+
add_otlp_to_all_loggers()
|
|
450
461
|
except Exception:
|
|
451
462
|
dbos_logger.error(f"DBOS failed to launch: {traceback.format_exc()}")
|
|
452
463
|
raise
|
|
@@ -844,11 +855,13 @@ class DBOS:
|
|
|
844
855
|
def cancel_workflow(cls, workflow_id: str) -> None:
|
|
845
856
|
"""Cancel a workflow by ID."""
|
|
846
857
|
_get_dbos_instance()._sys_db.cancel_workflow(workflow_id)
|
|
858
|
+
_get_or_create_dbos_registry().cancel_workflow(workflow_id)
|
|
847
859
|
|
|
848
860
|
@classmethod
|
|
849
861
|
def resume_workflow(cls, workflow_id: str) -> WorkflowHandle[Any]:
|
|
850
862
|
"""Resume a workflow by ID."""
|
|
851
863
|
_get_dbos_instance()._sys_db.resume_workflow(workflow_id)
|
|
864
|
+
_get_or_create_dbos_registry().clear_workflow_cancelled(workflow_id)
|
|
852
865
|
return execute_workflow_by_id(_get_dbos_instance(), workflow_id, False)
|
|
853
866
|
|
|
854
867
|
@classproperty
|
|
@@ -36,6 +36,7 @@ class DBOSErrorCode(Enum):
|
|
|
36
36
|
MaxStepRetriesExceeded = 7
|
|
37
37
|
NotAuthorized = 8
|
|
38
38
|
ConflictingWorkflowError = 9
|
|
39
|
+
WorkflowCancelled = 10
|
|
39
40
|
ConflictingRegistrationError = 25
|
|
40
41
|
|
|
41
42
|
|
|
@@ -130,6 +131,16 @@ class DBOSMaxStepRetriesExceeded(DBOSException):
|
|
|
130
131
|
)
|
|
131
132
|
|
|
132
133
|
|
|
134
|
+
class DBOSWorkflowCancelledError(DBOSException):
    """Exception raised when the workflow has already been cancelled.

    Raised by the step and transaction decorators when they detect, before
    executing, that the enclosing workflow was cancelled, aborting the
    remainder of the workflow.
    """

    def __init__(self, msg: str) -> None:
        super().__init__(
            msg,
            # Tag with the dedicated WorkflowCancelled code so callers can
            # distinguish cancellation from other DBOS error conditions.
            dbos_error_code=DBOSErrorCode.WorkflowCancelled.value,
        )
|
|
142
|
+
|
|
143
|
+
|
|
133
144
|
class DBOSConflictingRegistrationError(DBOSException):
|
|
134
145
|
"""Exception raised when conflicting decorators are applied to the same function."""
|
|
135
146
|
|
|
@@ -8,6 +8,8 @@ from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
|
|
|
8
8
|
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
|
|
9
9
|
from opentelemetry.sdk.resources import Resource
|
|
10
10
|
|
|
11
|
+
from dbos._utils import GlobalParams
|
|
12
|
+
|
|
11
13
|
if TYPE_CHECKING:
|
|
12
14
|
from ._dbos_config import ConfigFile
|
|
13
15
|
|
|
@@ -19,13 +21,11 @@ class DBOSLogTransformer(logging.Filter):
|
|
|
19
21
|
def __init__(self) -> None:
|
|
20
22
|
super().__init__()
|
|
21
23
|
self.app_id = os.environ.get("DBOS__APPID", "")
|
|
22
|
-
self.app_version = os.environ.get("DBOS__APPVERSION", "")
|
|
23
|
-
self.executor_id = os.environ.get("DBOS__VMID", "local")
|
|
24
24
|
|
|
25
25
|
def filter(self, record: Any) -> bool:
|
|
26
26
|
record.applicationID = self.app_id
|
|
27
|
-
record.applicationVersion =
|
|
28
|
-
record.executorID =
|
|
27
|
+
record.applicationVersion = GlobalParams.app_version
|
|
28
|
+
record.executorID = GlobalParams.executor_id
|
|
29
29
|
return True
|
|
30
30
|
|
|
31
31
|
|
|
@@ -86,9 +86,8 @@ def config_logger(config: "ConfigFile") -> None:
|
|
|
86
86
|
dbos_logger.addFilter(_otlp_transformer)
|
|
87
87
|
|
|
88
88
|
|
|
89
|
-
def add_otlp_to_all_loggers(
|
|
89
|
+
def add_otlp_to_all_loggers() -> None:
|
|
90
90
|
if _otlp_handler is not None and _otlp_transformer is not None:
|
|
91
|
-
_otlp_transformer.app_version = app_version
|
|
92
91
|
root = logging.root
|
|
93
92
|
|
|
94
93
|
root.addHandler(_otlp_handler)
|
|
@@ -5,6 +5,8 @@ from typing import TYPE_CHECKING, Optional, TypedDict
|
|
|
5
5
|
from psycopg import errors
|
|
6
6
|
from sqlalchemy.exc import OperationalError
|
|
7
7
|
|
|
8
|
+
from dbos._utils import GlobalParams
|
|
9
|
+
|
|
8
10
|
from ._core import P, R, execute_workflow_by_id, start_workflow
|
|
9
11
|
|
|
10
12
|
if TYPE_CHECKING:
|
|
@@ -71,7 +73,9 @@ def queue_thread(stop_event: threading.Event, dbos: "DBOS") -> None:
|
|
|
71
73
|
return
|
|
72
74
|
for _, queue in dbos._registry.queue_info_map.items():
|
|
73
75
|
try:
|
|
74
|
-
wf_ids = dbos._sys_db.start_queued_workflows(
|
|
76
|
+
wf_ids = dbos._sys_db.start_queued_workflows(
|
|
77
|
+
queue, GlobalParams.executor_id
|
|
78
|
+
)
|
|
75
79
|
for id in wf_ids:
|
|
76
80
|
execute_workflow_by_id(dbos, id)
|
|
77
81
|
except OperationalError as e:
|
|
@@ -4,6 +4,8 @@ import time
|
|
|
4
4
|
import traceback
|
|
5
5
|
from typing import TYPE_CHECKING, Any, List
|
|
6
6
|
|
|
7
|
+
from dbos._utils import GlobalParams
|
|
8
|
+
|
|
7
9
|
from ._core import execute_workflow_by_id
|
|
8
10
|
from ._error import DBOSWorkflowFunctionNotFoundError
|
|
9
11
|
from ._sys_db import GetPendingWorkflowsOutput
|
|
@@ -45,7 +47,7 @@ def recover_pending_workflows(
|
|
|
45
47
|
for executor_id in executor_ids:
|
|
46
48
|
dbos.logger.debug(f"Recovering pending workflows for executor: {executor_id}")
|
|
47
49
|
pending_workflows = dbos._sys_db.get_pending_workflows(
|
|
48
|
-
executor_id,
|
|
50
|
+
executor_id, GlobalParams.app_version
|
|
49
51
|
)
|
|
50
52
|
for pending_workflow in pending_workflows:
|
|
51
53
|
if (
|
|
@@ -64,6 +66,6 @@ def recover_pending_workflows(
|
|
|
64
66
|
execute_workflow_by_id(dbos, pending_workflow.workflow_uuid)
|
|
65
67
|
)
|
|
66
68
|
dbos.logger.info(
|
|
67
|
-
f"Recovering {len(pending_workflows)} workflows from version {
|
|
69
|
+
f"Recovering {len(pending_workflows)} workflows from version {GlobalParams.app_version}"
|
|
68
70
|
)
|
|
69
71
|
return workflow_handles
|
|
@@ -27,6 +27,8 @@ from alembic.config import Config
|
|
|
27
27
|
from sqlalchemy.exc import DBAPIError
|
|
28
28
|
from sqlalchemy.sql import func
|
|
29
29
|
|
|
30
|
+
from dbos._utils import GlobalParams
|
|
31
|
+
|
|
30
32
|
from . import _serialization
|
|
31
33
|
from ._dbos_config import ConfigFile
|
|
32
34
|
from ._error import (
|
|
@@ -192,9 +194,7 @@ class SystemDatabase:
|
|
|
192
194
|
port=config["database"]["port"],
|
|
193
195
|
database="postgres",
|
|
194
196
|
# fills the "application_name" column in pg_stat_activity
|
|
195
|
-
query={
|
|
196
|
-
"application_name": f"dbos_transact_{os.environ.get('DBOS__VMID', 'local')}"
|
|
197
|
-
},
|
|
197
|
+
query={"application_name": f"dbos_transact_{GlobalParams.executor_id}"},
|
|
198
198
|
)
|
|
199
199
|
engine = sa.create_engine(postgres_db_url)
|
|
200
200
|
with engine.connect() as conn:
|
|
@@ -214,9 +214,7 @@ class SystemDatabase:
|
|
|
214
214
|
port=config["database"]["port"],
|
|
215
215
|
database=sysdb_name,
|
|
216
216
|
# fills the "application_name" column in pg_stat_activity
|
|
217
|
-
query={
|
|
218
|
-
"application_name": f"dbos_transact_{os.environ.get('DBOS__VMID', 'local')}"
|
|
219
|
-
},
|
|
217
|
+
query={"application_name": f"dbos_transact_{GlobalParams.executor_id}"},
|
|
220
218
|
)
|
|
221
219
|
|
|
222
220
|
# Create a connection pool for the system database
|
|
@@ -7,6 +7,8 @@ from opentelemetry.sdk.trace import TracerProvider
|
|
|
7
7
|
from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter
|
|
8
8
|
from opentelemetry.trace import Span
|
|
9
9
|
|
|
10
|
+
from dbos._utils import GlobalParams
|
|
11
|
+
|
|
10
12
|
from ._dbos_config import ConfigFile
|
|
11
13
|
|
|
12
14
|
if TYPE_CHECKING:
|
|
@@ -17,8 +19,6 @@ class DBOSTracer:
|
|
|
17
19
|
|
|
18
20
|
def __init__(self) -> None:
|
|
19
21
|
self.app_id = os.environ.get("DBOS__APPID", None)
|
|
20
|
-
self.app_version = os.environ.get("DBOS__APPVERSION", None)
|
|
21
|
-
self.executor_id = os.environ.get("DBOS__VMID", "local")
|
|
22
22
|
self.provider: Optional[TracerProvider] = None
|
|
23
23
|
|
|
24
24
|
def config(self, config: ConfigFile) -> None:
|
|
@@ -51,8 +51,8 @@ class DBOSTracer:
|
|
|
51
51
|
context = trace.set_span_in_context(parent) if parent else None
|
|
52
52
|
span: Span = tracer.start_span(name=attributes["name"], context=context)
|
|
53
53
|
attributes["applicationID"] = self.app_id
|
|
54
|
-
attributes["applicationVersion"] =
|
|
55
|
-
attributes["executorID"] =
|
|
54
|
+
attributes["applicationVersion"] = GlobalParams.app_version
|
|
55
|
+
attributes["executorID"] = GlobalParams.executor_id
|
|
56
56
|
for k, v in attributes.items():
|
|
57
57
|
if k != "name" and v is not None and isinstance(v, (str, bool, int, float)):
|
|
58
58
|
span.set_attribute(k, v)
|
|
@@ -6,6 +6,7 @@ import requests
|
|
|
6
6
|
|
|
7
7
|
# Public API
|
|
8
8
|
from dbos import DBOS, ConfigFile, Queue, SetWorkflowID, _workflow_commands
|
|
9
|
+
from dbos._utils import GlobalParams
|
|
9
10
|
|
|
10
11
|
|
|
11
12
|
def test_admin_endpoints(dbos: DBOS) -> None:
|
|
@@ -59,10 +60,13 @@ def test_admin_endpoints(dbos: DBOS) -> None:
|
|
|
59
60
|
assert event.is_set(), "Event is not set!"
|
|
60
61
|
|
|
61
62
|
|
|
62
|
-
def test_admin_recovery(
|
|
63
|
+
def test_admin_recovery(config: ConfigFile) -> None:
|
|
63
64
|
os.environ["DBOS__VMID"] = "testexecutor"
|
|
64
65
|
os.environ["DBOS__APPVERSION"] = "testversion"
|
|
65
66
|
os.environ["DBOS__APPID"] = "testappid"
|
|
67
|
+
DBOS.destroy(destroy_registry=True)
|
|
68
|
+
dbos = DBOS(config=config)
|
|
69
|
+
DBOS.launch()
|
|
66
70
|
|
|
67
71
|
step_counter: int = 0
|
|
68
72
|
wf_counter: int = 0
|
|
@@ -19,6 +19,7 @@ from dbos._context import assert_current_dbos_context, get_local_dbos_context
|
|
|
19
19
|
from dbos._error import DBOSConflictingRegistrationError, DBOSMaxStepRetriesExceeded
|
|
20
20
|
from dbos._schemas.system_database import SystemSchema
|
|
21
21
|
from dbos._sys_db import GetWorkflowsInput
|
|
22
|
+
from dbos._utils import GlobalParams
|
|
22
23
|
|
|
23
24
|
|
|
24
25
|
def test_simple_workflow(dbos: DBOS) -> None:
|
|
@@ -1240,11 +1241,12 @@ def test_app_version(config: ConfigFile) -> None:
|
|
|
1240
1241
|
DBOS.launch()
|
|
1241
1242
|
|
|
1242
1243
|
# Verify that app version is correctly set to a hex string
|
|
1243
|
-
app_version =
|
|
1244
|
+
app_version = GlobalParams.app_version
|
|
1244
1245
|
assert len(app_version) > 0
|
|
1245
1246
|
assert is_hex(app_version)
|
|
1246
1247
|
|
|
1247
1248
|
DBOS.destroy(destroy_registry=True)
|
|
1249
|
+
assert GlobalParams.app_version == ""
|
|
1248
1250
|
dbos = DBOS(config=config)
|
|
1249
1251
|
|
|
1250
1252
|
@DBOS.workflow()
|
|
@@ -1258,7 +1260,7 @@ def test_app_version(config: ConfigFile) -> None:
|
|
|
1258
1260
|
DBOS.launch()
|
|
1259
1261
|
|
|
1260
1262
|
# Verify stability--the same workflow source produces the same app version.
|
|
1261
|
-
assert
|
|
1263
|
+
assert GlobalParams.app_version == app_version
|
|
1262
1264
|
|
|
1263
1265
|
DBOS.destroy(destroy_registry=True)
|
|
1264
1266
|
dbos = DBOS(config=config)
|
|
@@ -1269,7 +1271,7 @@ def test_app_version(config: ConfigFile) -> None:
|
|
|
1269
1271
|
|
|
1270
1272
|
# Verify that changing the workflow source changes the workflow version
|
|
1271
1273
|
DBOS.launch()
|
|
1272
|
-
assert
|
|
1274
|
+
assert GlobalParams.app_version != app_version
|
|
1273
1275
|
|
|
1274
1276
|
# Verify that version can be overriden with an environment variable
|
|
1275
1277
|
app_version = "12345"
|
|
@@ -1283,7 +1285,7 @@ def test_app_version(config: ConfigFile) -> None:
|
|
|
1283
1285
|
return x
|
|
1284
1286
|
|
|
1285
1287
|
DBOS.launch()
|
|
1286
|
-
assert
|
|
1288
|
+
assert GlobalParams.app_version == app_version
|
|
1287
1289
|
|
|
1288
1290
|
del os.environ["DBOS__APPVERSION"]
|
|
1289
1291
|
|
|
@@ -9,6 +9,7 @@ from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanE
|
|
|
9
9
|
|
|
10
10
|
from dbos import DBOS
|
|
11
11
|
from dbos._tracer import dbos_tracer
|
|
12
|
+
from dbos._utils import GlobalParams
|
|
12
13
|
|
|
13
14
|
|
|
14
15
|
def test_spans(dbos: DBOS) -> None:
|
|
@@ -36,7 +37,7 @@ def test_spans(dbos: DBOS) -> None:
|
|
|
36
37
|
|
|
37
38
|
for span in spans:
|
|
38
39
|
assert span.attributes is not None
|
|
39
|
-
assert span.attributes["applicationVersion"] ==
|
|
40
|
+
assert span.attributes["applicationVersion"] == GlobalParams.app_version
|
|
40
41
|
assert span.context is not None
|
|
41
42
|
|
|
42
43
|
assert spans[0].name == test_step.__name__
|
|
@@ -74,7 +75,7 @@ async def test_spans_async(dbos: DBOS) -> None:
|
|
|
74
75
|
|
|
75
76
|
for span in spans:
|
|
76
77
|
assert span.attributes is not None
|
|
77
|
-
assert span.attributes["applicationVersion"] ==
|
|
78
|
+
assert span.attributes["applicationVersion"] == GlobalParams.app_version
|
|
78
79
|
assert span.context is not None
|
|
79
80
|
|
|
80
81
|
assert spans[0].name == test_step.__name__
|
|
@@ -111,7 +112,7 @@ def test_temp_wf_fastapi(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
|
|
|
111
112
|
|
|
112
113
|
for span in spans:
|
|
113
114
|
assert span.attributes is not None
|
|
114
|
-
assert span.attributes["applicationVersion"] ==
|
|
115
|
+
assert span.attributes["applicationVersion"] == GlobalParams.app_version
|
|
115
116
|
assert span.context is not None
|
|
116
117
|
|
|
117
118
|
assert spans[0].name == test_step_endpoint.__name__
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
import threading
|
|
2
|
+
import time
|
|
3
|
+
import uuid
|
|
4
|
+
from datetime import datetime, timedelta, timezone
|
|
5
|
+
|
|
6
|
+
# Public API
|
|
7
|
+
from dbos import (
|
|
8
|
+
DBOS,
|
|
9
|
+
ConfigFile,
|
|
10
|
+
Queue,
|
|
11
|
+
SetWorkflowID,
|
|
12
|
+
WorkflowStatusString,
|
|
13
|
+
_workflow_commands,
|
|
14
|
+
)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def test_basic(dbos: DBOS, config: ConfigFile) -> None:
    """Run a simple two-step workflow to completion and verify both steps executed."""
    steps_completed = 0

    @DBOS.step()
    def step_one() -> None:
        nonlocal steps_completed
        steps_completed = steps_completed + 1
        print("Step one completed!")

    @DBOS.step()
    def step_two() -> None:
        nonlocal steps_completed
        steps_completed = steps_completed + 1
        print("Step two completed!")

    @DBOS.workflow()
    def simple_workflow() -> None:
        step_one()
        dbos.sleep(1)
        step_two()
        print("Executed Simple workflow")

    # Invoke the workflow synchronously, then give it a moment to settle.
    simple_workflow()
    time.sleep(1)  # wait for the workflow to complete

    assert (
        steps_completed == 2
    ), f"Expected steps_completed to be 2, but got {steps_completed}"
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def test_two_steps_cancel(dbos: DBOS, config: ConfigFile) -> None:
    """Cancel a two-step workflow after its first step, then resume it.

    The workflow runs step one, sleeps, and runs step two. Cancellation
    should abort the workflow with only the first step completed; resuming
    it afterwards should run the second step exactly once.
    """
    steps_completed = 0

    @DBOS.step()
    def step_one() -> None:
        nonlocal steps_completed
        steps_completed += 1
        print("Step one completed!")

    @DBOS.step()
    def step_two() -> None:
        nonlocal steps_completed
        steps_completed += 1
        print("Step two completed!")

    @DBOS.workflow()
    def simple_workflow() -> None:
        step_one()
        dbos.sleep(2)
        step_two()
        print("Executed Simple workflow")

    # BUG FIX: wfuuid was previously bound inside the try block but used
    # after it, risking a NameError; bind it first so it is always in scope.
    wfuuid = str(uuid.uuid4())
    cancelled = False
    try:
        with SetWorkflowID(wfuuid):
            simple_workflow()
        dbos.cancel_workflow(wfuuid)
    except Exception:
        cancelled = True

    # BUG FIX: these assertions previously lived inside the `except` block,
    # so the test passed vacuously whenever no exception was raised.
    assert cancelled, "Expected workflow cancellation to raise an exception"
    assert (
        steps_completed == 1
    ), f"Expected steps_completed to be 1, but got {steps_completed}"

    dbos.resume_workflow(wfuuid)
    time.sleep(1)

    assert (
        steps_completed == 2
    ), f"Expected steps_completed to be 2, but got {steps_completed}"
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def test_two_transactions_cancel(dbos: DBOS, config: ConfigFile) -> None:
    """Cancel a two-transaction workflow after its first transaction, then resume it.

    Also resumes a second time to verify that resuming an already-completed
    workflow does not re-run its transactions.
    """
    tr_completed = 0

    @DBOS.transaction()
    def transaction_one() -> None:
        nonlocal tr_completed
        tr_completed += 1
        print("Transaction one completed!")

    @DBOS.transaction()
    def transaction_two() -> None:
        nonlocal tr_completed
        tr_completed += 1
        print("Step two completed!")

    @DBOS.workflow()
    def simple_workflow() -> None:
        transaction_one()
        dbos.sleep(2)
        transaction_two()
        print("Executed Simple workflow")

    # BUG FIX: wfuuid was previously bound inside the try block but used
    # after it, risking a NameError; bind it first so it is always in scope.
    wfuuid = str(uuid.uuid4())
    cancelled = False
    try:
        with SetWorkflowID(wfuuid):
            simple_workflow()
        dbos.cancel_workflow(wfuuid)
    except Exception:
        cancelled = True

    # BUG FIX: these assertions previously lived inside the `except` block,
    # so the test passed vacuously whenever no exception was raised. Also
    # corrected the messages, which referred to steps_completed instead of
    # tr_completed.
    assert cancelled, "Expected workflow cancellation to raise an exception"
    assert (
        tr_completed == 1
    ), f"Expected tr_completed to be 1, but got {tr_completed}"

    dbos.resume_workflow(wfuuid)
    time.sleep(1)

    assert (
        tr_completed == 2
    ), f"Expected tr_completed to be 2, but got {tr_completed}"

    # Resume a second time: a completed workflow must not re-execute.
    dbos.resume_workflow(wfuuid)
    time.sleep(1)

    assert (
        tr_completed == 2
    ), f"Expected tr_completed to be 2, but got {tr_completed}"
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py
RENAMED
|
File without changes
|
|
File without changes
|
{dbos-0.22.0a11 → dbos-0.23.0a2}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|