dbos 0.22.0a1__tar.gz → 0.22.0a2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbos might be problematic.

Files changed (90)
  1. {dbos-0.22.0a1 → dbos-0.22.0a2}/PKG-INFO +1 -1
  2. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_core.py +37 -81
  3. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_registrations.py +12 -7
  4. {dbos-0.22.0a1 → dbos-0.22.0a2}/pyproject.toml +1 -1
  5. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/conftest.py +18 -1
  6. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_classdecorators.py +427 -8
  7. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_queue.py +1 -18
  8. {dbos-0.22.0a1 → dbos-0.22.0a2}/LICENSE +0 -0
  9. {dbos-0.22.0a1 → dbos-0.22.0a2}/README.md +0 -0
  10. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/__init__.py +0 -0
  11. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_admin_server.py +0 -0
  12. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_app_db.py +0 -0
  13. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_classproperty.py +0 -0
  14. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_cloudutils/authentication.py +0 -0
  15. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_cloudutils/cloudutils.py +0 -0
  16. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_cloudutils/databases.py +0 -0
  17. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_context.py +0 -0
  18. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_croniter.py +0 -0
  19. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_db_wizard.py +0 -0
  20. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_dbos.py +0 -0
  21. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_dbos_config.py +0 -0
  22. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_error.py +0 -0
  23. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_fastapi.py +0 -0
  24. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_flask.py +0 -0
  25. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_kafka.py +0 -0
  26. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_kafka_message.py +0 -0
  27. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_logger.py +0 -0
  28. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_migrations/env.py +0 -0
  29. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_migrations/script.py.mako +0 -0
  30. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  31. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  32. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  33. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  34. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
  35. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
  36. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
  37. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_outcome.py +0 -0
  38. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_queue.py +0 -0
  39. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_recovery.py +0 -0
  40. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_request.py +0 -0
  41. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_roles.py +0 -0
  42. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_scheduler.py +0 -0
  43. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_schemas/__init__.py +0 -0
  44. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_schemas/application_database.py +0 -0
  45. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_schemas/system_database.py +0 -0
  46. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_serialization.py +0 -0
  47. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_sys_db.py +0 -0
  48. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_templates/dbos-db-starter/README.md +0 -0
  49. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  50. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_templates/dbos-db-starter/__package/main.py +0 -0
  51. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  52. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
  53. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
  54. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
  55. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
  56. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
  57. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  58. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_tracer.py +0 -0
  59. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_workflow_commands.py +0 -0
  60. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/cli/_github_init.py +0 -0
  61. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/cli/_template_init.py +0 -0
  62. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/cli/cli.py +0 -0
  63. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/dbos-config.schema.json +0 -0
  64. {dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/py.typed +0 -0
  65. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/__init__.py +0 -0
  66. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/atexit_no_ctor.py +0 -0
  67. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/atexit_no_launch.py +0 -0
  68. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/classdefs.py +0 -0
  69. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/more_classdefs.py +0 -0
  70. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/queuedworkflow.py +0 -0
  71. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_admin_server.py +0 -0
  72. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_async.py +0 -0
  73. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_concurrency.py +0 -0
  74. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_config.py +0 -0
  75. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_croniter.py +0 -0
  76. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_dbos.py +0 -0
  77. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_failures.py +0 -0
  78. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_fastapi.py +0 -0
  79. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_fastapi_roles.py +0 -0
  80. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_flask.py +0 -0
  81. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_kafka.py +0 -0
  82. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_outcome.py +0 -0
  83. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_package.py +0 -0
  84. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_scheduler.py +0 -0
  85. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_schema_migration.py +0 -0
  86. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_singleton.py +0 -0
  87. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_spans.py +0 -0
  88. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_sqlalchemy.py +0 -0
  89. {dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_workflow_cmds.py +0 -0
  90. {dbos-0.22.0a1 → dbos-0.22.0a2}/version/__init__.py +0 -0
{dbos-0.22.0a1 → dbos-0.22.0a2}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 0.22.0a1
+Version: 0.22.0a2
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
{dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_core.py

@@ -63,6 +63,7 @@ from ._registrations import (
     get_or_create_func_info,
     get_temp_workflow_type,
     set_dbos_func_name,
+    set_func_info,
     set_temp_workflow_type,
 )
 from ._roles import check_required_roles
@@ -286,6 +287,7 @@ def execute_workflow_by_id(
     ctx.request = (
         _serialization.deserialize(request) if request is not None else None
     )
+    # If this function belongs to a configured class, add that class instance as its first argument
     if status["config_name"] is not None:
         config_name = status["config_name"]
         class_name = status["class_name"]
@@ -295,28 +297,9 @@ def execute_workflow_by_id(
                 workflow_id,
                 f"Cannot execute workflow because instance '{iname}' is not registered",
             )
-
-        if startNew:
-            return start_workflow(
-                dbos,
-                wf_func,
-                status["queue_name"],
-                True,
-                dbos._registry.instance_info_map[iname],
-                *inputs["args"],
-                **inputs["kwargs"],
-            )
-        else:
-            with SetWorkflowID(workflow_id):
-                return start_workflow(
-                    dbos,
-                    wf_func,
-                    status["queue_name"],
-                    True,
-                    dbos._registry.instance_info_map[iname],
-                    *inputs["args"],
-                    **inputs["kwargs"],
-                )
+        class_instance = dbos._registry.instance_info_map[iname]
+        inputs["args"] = (class_instance,) + inputs["args"]
+    # If this function is a class method, add that class object as its first argument
     elif status["class_name"] is not None:
         class_name = status["class_name"]
         if class_name not in dbos._registry.class_info_map:
@@ -324,30 +307,20 @@ def execute_workflow_by_id(
                 workflow_id,
                 f"Cannot execute workflow because class '{class_name}' is not registered",
             )
+        class_object = dbos._registry.class_info_map[class_name]
+        inputs["args"] = (class_object,) + inputs["args"]

-        if startNew:
-            return start_workflow(
-                dbos,
-                wf_func,
-                status["queue_name"],
-                True,
-                dbos._registry.class_info_map[class_name],
-                *inputs["args"],
-                **inputs["kwargs"],
-            )
-        else:
-            with SetWorkflowID(workflow_id):
-                return start_workflow(
-                    dbos,
-                    wf_func,
-                    status["queue_name"],
-                    True,
-                    dbos._registry.class_info_map[class_name],
-                    *inputs["args"],
-                    **inputs["kwargs"],
-                )
+    if startNew:
+        return start_workflow(
+            dbos,
+            wf_func,
+            status["queue_name"],
+            True,
+            *inputs["args"],
+            **inputs["kwargs"],
+        )
     else:
-        if startNew:
+        with SetWorkflowID(workflow_id):
             return start_workflow(
                 dbos,
                 wf_func,
@@ -356,16 +329,6 @@ def execute_workflow_by_id(
                 *inputs["args"],
                 **inputs["kwargs"],
             )
-        else:
-            with SetWorkflowID(workflow_id):
-                return start_workflow(
-                    dbos,
-                    wf_func,
-                    status["queue_name"],
-                    True,
-                    *inputs["args"],
-                    **inputs["kwargs"],
-                )


 @overload
@@ -398,9 +361,12 @@ def start_workflow(
     *args: P.args,
     **kwargs: P.kwargs,
 ) -> "WorkflowHandle[R]":
+    # If the function has a class, add the class object as its first argument
     fself: Optional[object] = None
    if hasattr(func, "__self__"):
         fself = func.__self__
+    if fself is not None:
+        args = (fself,) + args  # type: ignore

     fi = get_func_info(func)
     if fi is None:
@@ -436,17 +402,13 @@ def start_workflow(
     new_wf_ctx.id_assigned_for_next_workflow = new_wf_ctx.assign_workflow_id()
     new_wf_id = new_wf_ctx.id_assigned_for_next_workflow

-    gin_args: Tuple[Any, ...] = args
-    if fself is not None:
-        gin_args = (fself,)
-
     status = _init_workflow(
         dbos,
         new_wf_ctx,
         inputs=inputs,
         wf_name=get_dbos_func_name(func),
-        class_name=get_dbos_class_name(fi, func, gin_args),
-        config_name=get_config_name(fi, func, gin_args),
+        class_name=get_dbos_class_name(fi, func, args),
+        config_name=get_config_name(fi, func, args),
         temp_wf_type=get_temp_workflow_type(func),
         queue=queue_name,
         max_recovery_attempts=fi.max_recovery_attempts,
@@ -464,27 +426,15 @@ def start_workflow(
         )
         return WorkflowHandlePolling(new_wf_id, dbos)

-    if fself is not None:
-        future = dbos._executor.submit(
-            cast(Callable[..., R], _execute_workflow_wthread),
-            dbos,
-            status,
-            func,
-            new_wf_ctx,
-            fself,
-            *args,
-            **kwargs,
-        )
-    else:
-        future = dbos._executor.submit(
-            cast(Callable[..., R], _execute_workflow_wthread),
-            dbos,
-            status,
-            func,
-            new_wf_ctx,
-            *args,
-            **kwargs,
-        )
+    future = dbos._executor.submit(
+        cast(Callable[..., R], _execute_workflow_wthread),
+        dbos,
+        status,
+        func,
+        new_wf_ctx,
+        *args,
+        **kwargs,
+    )
     return WorkflowHandleFuture(new_wf_id, future, dbos)


@@ -516,6 +466,8 @@ def workflow_wrapper(

     @wraps(func)
     def wrapper(*args: Any, **kwargs: Any) -> R:
+        fi = get_func_info(func)
+        assert fi is not None
         if dbosreg.dbos is None:
             raise DBOSException(
                 f"Function {func.__name__} invoked before DBOS initialized"
@@ -726,6 +678,8 @@ def decorate_transaction(
         set_temp_workflow_type(temp_wf, "transaction")
         dbosreg.register_wf_function(get_dbos_func_name(temp_wf), wrapped_wf)
         wrapper.__orig_func = temp_wf  # type: ignore
+        set_func_info(wrapped_wf, get_or_create_func_info(func))
+        set_func_info(temp_wf, get_or_create_func_info(func))

         return cast(F, wrapper)

@@ -875,6 +829,8 @@ def decorate_step(
         set_temp_workflow_type(temp_wf, "step")
         dbosreg.register_wf_function(get_dbos_func_name(temp_wf), wrapped_wf)
         wrapper.__orig_func = temp_wf  # type: ignore
+        set_func_info(wrapped_wf, get_or_create_func_info(func))
+        set_func_info(temp_wf, get_or_create_func_info(func))

         return cast(Callable[P, R], wrapper)

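The _core.py refactor above collapses the special-cased instance and class call paths into a single one by prepending the receiver to the positional arguments. A minimal, self-contained sketch of that bound-method pattern (not part of the package; all names below are illustrative):

```python
# Illustrative sketch, not dbos code: a bound method carries its instance in
# __self__, so prepending that instance to the argument tuple lets one generic
# code path invoke plain functions and instance methods alike.
from typing import Any, Callable, Tuple


def normalize_call(
    func: Callable[..., Any], args: Tuple[Any, ...]
) -> Tuple[Callable[..., Any], Tuple[Any, ...]]:
    # If func is a bound method, unwrap it and put its instance first.
    fself = getattr(func, "__self__", None)
    if fself is not None:
        return func.__func__, (fself,) + args
    return func, args


class Greeter:
    def __init__(self, name: str) -> None:
        self.name = name

    def greet(self, suffix: str) -> str:
        return f"hello from {self.name}{suffix}"


if __name__ == "__main__":
    g = Greeter("dbos")
    fn, call_args = normalize_call(g.greet, ("!",))
    assert fn(*call_args) == "hello from dbos!"
```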
{dbos-0.22.0a1 → dbos-0.22.0a2}/dbos/_registrations.py

@@ -1,4 +1,5 @@
 import inspect
+from dataclasses import dataclass
 from enum import Enum
 from types import FunctionType
 from typing import Any, Callable, List, Literal, Optional, Tuple, Type, cast
@@ -31,9 +32,9 @@ def set_temp_workflow_type(f: Any, name: TempWorkflowType) -> None:
     setattr(f, "dbos_temp_workflow_type", name)


+@dataclass
 class DBOSClassInfo:
-    def __init__(self) -> None:
-        self.def_required_roles: Optional[List[str]] = None
+    def_required_roles: Optional[List[str]] = None


 class DBOSFuncType(Enum):
@@ -44,12 +45,12 @@ class DBOSFuncType(Enum):
     Instance = 4


+@dataclass
 class DBOSFuncInfo:
-    def __init__(self) -> None:
-        self.class_info: Optional[DBOSClassInfo] = None
-        self.func_type: DBOSFuncType = DBOSFuncType.Unknown
-        self.required_roles: Optional[List[str]] = None
-        self.max_recovery_attempts = DEFAULT_MAX_RECOVERY_ATTEMPTS
+    class_info: Optional[DBOSClassInfo] = None
+    func_type: DBOSFuncType = DBOSFuncType.Unknown
+    required_roles: Optional[List[str]] = None
+    max_recovery_attempts: int = DEFAULT_MAX_RECOVERY_ATTEMPTS


 def get_or_create_class_info(cls: Type[Any]) -> DBOSClassInfo:
@@ -110,6 +111,10 @@ def get_or_create_func_info(func: Callable[..., Any]) -> DBOSFuncInfo:
     return fi


+def set_func_info(func: Callable[..., Any], fi: DBOSFuncInfo) -> None:
+    setattr(func, "dbos_func_decorator_info", fi)
+
+
 def get_class_info(cls: Type[Any]) -> Optional[DBOSClassInfo]:
     if hasattr(cls, "dbos_class_decorator_info"):
         ci: DBOSClassInfo = getattr(cls, "dbos_class_decorator_info")
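The _registrations.py change swaps the hand-written __init__ methods for dataclasses and adds set_func_info, which stores a function's registration info as an attribute on the function object. A rough sketch of both idioms, using made-up names rather than the library's internals:

```python
# Illustrative sketch, not dbos code: a @dataclass with field defaults replaces
# a hand-written __init__, and per-function metadata is attached and read back
# via an attribute on the function object.
from dataclasses import dataclass
from typing import Any, Callable, List, Optional


@dataclass
class FuncMeta:
    required_roles: Optional[List[str]] = None
    max_recovery_attempts: int = 50  # illustrative default, not the library's constant


def attach_meta(func: Callable[..., Any], meta: FuncMeta) -> None:
    # Store the metadata directly on the function object.
    setattr(func, "_meta", meta)


def read_meta(func: Callable[..., Any]) -> Optional[FuncMeta]:
    return getattr(func, "_meta", None)


def job() -> None:
    pass


attach_meta(job, FuncMeta(required_roles=["admin"]))
meta = read_meta(job)
assert meta is not None and meta.required_roles == ["admin"]
```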
{dbos-0.22.0a1 → dbos-0.22.0a2}/pyproject.toml

@@ -27,7 +27,7 @@ dependencies = [
 ]
 requires-python = ">=3.9"
 readme = "README.md"
-version = "0.22.0a1"
+version = "0.22.0a2"

 [project.license]
 text = "MIT"
{dbos-0.22.0a1 → dbos-0.22.0a2}/tests/conftest.py

@@ -1,7 +1,7 @@
 import glob
 import os
 import subprocess
-import warnings
+import time
 from typing import Any, Generator, Tuple

 import pytest
@@ -10,6 +10,7 @@ from fastapi import FastAPI
 from flask import Flask

 from dbos import DBOS, ConfigFile
+from dbos._schemas.system_database import SystemSchema


 @pytest.fixture(scope="session")
@@ -149,3 +150,19 @@ def dbos_flask(
 def pytest_collection_modifyitems(session: Any, config: Any, items: Any) -> None:
     for item in items:
         item._nodeid = "\n" + item.nodeid + "\n"
+
+
+def queue_entries_are_cleaned_up(dbos: DBOS) -> bool:
+    max_tries = 10
+    success = False
+    for i in range(max_tries):
+        with dbos._sys_db.engine.begin() as c:
+            query = sa.select(sa.func.count()).select_from(SystemSchema.workflow_queue)
+            row = c.execute(query).fetchone()
+            assert row is not None
+            count = row[0]
+            if count == 0:
+                success = True
+                break
+        time.sleep(1)
+    return success
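The new queue_entries_are_cleaned_up helper polls the workflow_queue system table, with up to ten one-second retries, until it is empty, so queue tests can assert eventual cleanup. A hypothetical test using it might look like this (the workflow and queue names below are invented for illustration and assume the dbos pytest fixture from conftest.py):

```python
# Illustrative sketch, not part of the package's test suite.
from dbos import DBOS, Queue
from tests.conftest import queue_entries_are_cleaned_up


@DBOS.workflow()
def example_workflow(x: int) -> int:
    return x + 1


def test_queue_cleanup(dbos: DBOS) -> None:
    queue = Queue("example_queue")
    handle = queue.enqueue(example_workflow, 1)
    assert handle.get_result() == 2
    # Poll (up to ~10 seconds) until the workflow_queue table is empty.
    assert queue_entries_are_cleaned_up(dbos)
```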
{dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_classdecorators.py

@@ -1,13 +1,16 @@
-from typing import Optional
+import threading
+import uuid
+from typing import Callable, Optional

 import pytest
 import sqlalchemy as sa

 # Public API
-from dbos import DBOS, DBOSConfiguredInstance, SetWorkflowID
+from dbos import DBOS, DBOSConfiguredInstance, Queue, SetWorkflowID

 # Private API used because this is a test
 from dbos._context import DBOSContextEnsure, assert_current_dbos_context
+from tests.conftest import queue_entries_are_cleaned_up


 def test_required_roles(dbos: DBOS) -> None:
@@ -414,13 +417,14 @@ def test_class_recovery(dbos: DBOS) -> None:


 def test_inst_recovery(dbos: DBOS) -> None:
+    wfid = str(uuid.uuid4())
     exc_cnt: int = 0
-    last_inst: Optional[DBOSTestInstRec] = None
+    last_inst: Optional[TestClass] = None

     @DBOS.dbos_class()
-    class DBOSTestInstRec(DBOSConfiguredInstance):
+    class TestClass(DBOSConfiguredInstance):
         def __init__(self) -> None:
-            super().__init__("bob")
+            super().__init__("test_class")

         @DBOS.workflow()
         def check_inst(self, arg1: str) -> str:
@@ -431,8 +435,8 @@ def test_inst_recovery(dbos: DBOS) -> None:
             last_inst = self
             return "ran2"

-    inst = DBOSTestInstRec()
-    with SetWorkflowID("run2"):
+    inst = TestClass()
+    with SetWorkflowID(wfid):
         assert "ran2" == inst.check_inst("arg1")

     assert exc_cnt == 1
@@ -440,7 +444,422 @@ def test_inst_recovery(dbos: DBOS) -> None:

     # Test we can execute the workflow by uuid as recovery would do
     last_inst = None
-    handle = DBOS.execute_workflow_id("run2")
+    handle = DBOS.execute_workflow_id(wfid)
     assert handle.get_result() == "ran2"
     assert exc_cnt == 2
     assert last_inst is inst
+
+    status = DBOS.retrieve_workflow(wfid).get_status()
+    assert status.class_name == "TestClass"
+    assert status.config_name == "test_class"
+
+
+def test_inst_async_recovery(dbos: DBOS) -> None:
+    wfid = str(uuid.uuid4())
+    event = threading.Event()
+
+    @DBOS.dbos_class()
+    class TestClass(DBOSConfiguredInstance):
+
+        def __init__(self, multiplier: int) -> None:
+            self.multiply: Callable[[int], int] = lambda x: x * multiplier
+            super().__init__("test_class")
+
+        @DBOS.workflow()
+        def workflow(self, x: int) -> int:
+            event.wait()
+            return self.multiply(x)
+
+    input = 2
+    multiplier = 5
+    inst = TestClass(multiplier)
+
+    with SetWorkflowID(wfid):
+        orig_handle = DBOS.start_workflow(inst.workflow, input)
+
+    status = orig_handle.get_status()
+    assert status.class_name == "TestClass"
+    assert status.config_name == "test_class"
+
+    recovery_handle = DBOS.execute_workflow_id(wfid)
+
+    event.set()
+    assert orig_handle.get_result() == input * multiplier
+    assert recovery_handle.get_result() == input * multiplier
+
+
+def test_inst_async_step_recovery(dbos: DBOS) -> None:
+    wfid = str(uuid.uuid4())
+    event = threading.Event()
+
+    @DBOS.dbos_class()
+    class TestClass(DBOSConfiguredInstance):
+
+        def __init__(self, multiplier: int) -> None:
+            self.multiply: Callable[[int], int] = lambda x: x * multiplier
+            super().__init__("test_class")
+
+        @DBOS.step()
+        def step(self, x: int) -> int:
+            event.wait()
+            return self.multiply(x)
+
+    input = 2
+    multiplier = 5
+    inst = TestClass(multiplier)
+
+    with SetWorkflowID(wfid):
+        orig_handle = DBOS.start_workflow(inst.step, input)
+
+    status = orig_handle.get_status()
+    assert status.class_name == "TestClass"
+    assert status.config_name == "test_class"
+
+    recovery_handle = DBOS.execute_workflow_id(wfid)
+
+    event.set()
+    assert orig_handle.get_result() == input * multiplier
+    assert recovery_handle.get_result() == input * multiplier
+
+
+def test_step_recovery(dbos: DBOS) -> None:
+    wfid = str(uuid.uuid4())
+    thread_event = threading.Event()
+    blocking_event = threading.Event()
+    return_value = None
+
+    @DBOS.dbos_class()
+    class TestClass(DBOSConfiguredInstance):
+
+        def __init__(self, multiplier: int) -> None:
+            self.multiply: Callable[[int], int] = lambda x: x * multiplier
+            super().__init__("test_class")
+
+        @DBOS.step()
+        def step(self, x: int) -> int:
+            thread_event.set()
+            blocking_event.wait()
+            return self.multiply(x)
+
+    input = 2
+    multiplier = 5
+    inst = TestClass(multiplier)
+
+    # We're testing synchronously calling the step, but need to do so
+    # asynchronously. Hence, a thread.
+    def call_step() -> None:
+        with SetWorkflowID(wfid):
+            nonlocal return_value
+            return_value = inst.step(input)
+
+    thread = threading.Thread(target=call_step)
+    thread.start()
+    thread_event.wait()
+
+    status = DBOS.retrieve_workflow(wfid).get_status()
+    assert status.class_name == "TestClass"
+    assert status.config_name == "test_class"
+
+    recovery_handle = DBOS.execute_workflow_id(wfid)
+
+    blocking_event.set()
+    thread.join()
+    assert return_value == input * multiplier
+    assert recovery_handle.get_result() == input * multiplier
+
+
+def test_class_queue_recovery(dbos: DBOS) -> None:
+    step_counter: int = 0
+    queued_steps = 5
+    multiplier = 5
+
+    wfid = str(uuid.uuid4())
+    queue = Queue("test_queue")
+    step_events = [threading.Event() for _ in range(queued_steps)]
+    event = threading.Event()
+
+    @DBOS.dbos_class()
+    class TestClass(DBOSConfiguredInstance):
+        def __init__(self, multiplier: int) -> None:
+            self.multiply: Callable[[int], int] = lambda x: x * multiplier
+            super().__init__("test_class")
+
+        @DBOS.workflow()
+        def test_workflow(self) -> list[int]:
+            assert DBOS.workflow_id == wfid
+            handles = []
+            for i in range(queued_steps):
+                h = queue.enqueue(self.test_step, i)
+                handles.append(h)
+            return [h.get_result() for h in handles]
+
+        @DBOS.step()
+        def test_step(self, i: int) -> int:
+            nonlocal step_counter
+            step_counter += 1
+            step_events[i].set()
+            event.wait()
+            return self.multiply(i)
+
+    inst = TestClass(multiplier)
+
+    # Start the workflow. Wait for all five steps to start. Verify that they started.
+    with SetWorkflowID(wfid):
+        original_handle = DBOS.start_workflow(inst.test_workflow)
+    for e in step_events:
+        e.wait()
+        e.clear()
+
+    assert step_counter == 5
+
+    # Recover the workflow, then resume it.
+    recovery_handles = DBOS.recover_pending_workflows()
+    # Wait until the 2nd invocation of the workflows are dequeued and executed
+    for e in step_events:
+        e.wait()
+    event.set()
+
+    # There should be one handle for the workflow and another for each queued step.
+    assert len(recovery_handles) == queued_steps + 1
+    # Verify that both the recovered and original workflows complete correctly.
+    result = [i * multiplier for i in range(5)]
+    for h in recovery_handles:
+        status = h.get_status()
+        assert status.class_name == "TestClass"
+        assert status.config_name == "test_class"
+        if h.get_workflow_id() == wfid:
+            assert h.get_result() == result
+    assert original_handle.get_result() == result
+    # Each step should start twice, once originally and once in recovery.
+    assert step_counter == 10
+
+    # Rerun the workflow. Because each step is complete, none should start again.
+    with SetWorkflowID(wfid):
+        assert inst.test_workflow() == result
+    assert step_counter == 10
+
+    # Verify all queue entries eventually get cleaned up.
+    assert queue_entries_are_cleaned_up(dbos)
+
+
+def test_class_static_queue_recovery(dbos: DBOS) -> None:
+    step_counter: int = 0
+    queued_steps = 5
+
+    wfid = str(uuid.uuid4())
+    queue = Queue("test_queue")
+    step_events = [threading.Event() for _ in range(queued_steps)]
+    event = threading.Event()
+
+    @DBOS.dbos_class()
+    class TestClass:
+        @staticmethod
+        @DBOS.workflow()
+        def test_workflow() -> list[int]:
+            assert DBOS.workflow_id == wfid
+            handles = []
+            for i in range(queued_steps):
+                h = queue.enqueue(TestClass.test_step, i)
+                handles.append(h)
+            return [h.get_result() for h in handles]
+
+        @staticmethod
+        @DBOS.step()
+        def test_step(i: int) -> int:
+            nonlocal step_counter
+            step_counter += 1
+            step_events[i].set()
+            event.wait()
+            return i
+
+    # Start the workflow. Wait for all five steps to start. Verify that they started.
+    with SetWorkflowID(wfid):
+        original_handle = DBOS.start_workflow(TestClass.test_workflow)
+    for e in step_events:
+        e.wait()
+        e.clear()
+
+    assert step_counter == 5
+
+    # Recover the workflow, then resume it.
+    recovery_handles = DBOS.recover_pending_workflows()
+    # Wait until the 2nd invocation of the workflows are dequeued and executed
+    for e in step_events:
+        e.wait()
+    event.set()
+
+    # There should be one handle for the workflow and another for each queued step.
+    assert len(recovery_handles) == queued_steps + 1
+    # Verify that both the recovered and original workflows complete correctly.
+    result = [i for i in range(5)]
+    for h in recovery_handles:
+        status = h.get_status()
+        # Class name is not recorded for static methods
+        assert status.class_name == None
+        assert status.config_name == None
+        if h.get_workflow_id() == wfid:
+            assert h.get_result() == result
+    assert original_handle.get_result() == result
+    # Each step should start twice, once originally and once in recovery.
+    assert step_counter == 10
+
+    # Rerun the workflow. Because each step is complete, none should start again.
+    with SetWorkflowID(wfid):
+        assert TestClass.test_workflow() == result
+    assert step_counter == 10
+
+    # Verify all queue entries eventually get cleaned up.
+    assert queue_entries_are_cleaned_up(dbos)
+
+
+def test_class_classmethod_queue_recovery(dbos: DBOS) -> None:
+    step_counter: int = 0
+    multiplier = 5
+    queued_steps = 5
+
+    wfid = str(uuid.uuid4())
+    queue = Queue("test_queue")
+    step_events = [threading.Event() for _ in range(queued_steps)]
+    event = threading.Event()
+
+    @DBOS.dbos_class()
+    class TestClass:
+        multiply: Callable[[int], int] = lambda _: 0
+
+        @classmethod
+        @DBOS.workflow()
+        def test_workflow(cls) -> list[int]:
+            cls.multiply = lambda x: x * multiplier
+            assert DBOS.workflow_id == wfid
+            handles = []
+            for i in range(queued_steps):
+                h = queue.enqueue(TestClass.test_step, i)
+                handles.append(h)
+            return [h.get_result() for h in handles]
+
+        @classmethod
+        @DBOS.step()
+        def test_step(cls, i: int) -> int:
+            nonlocal step_counter
+            step_counter += 1
+            step_events[i].set()
+            event.wait()
+            return cls.multiply(i)
+
+    # Start the workflow. Wait for all five steps to start. Verify that they started.
+    with SetWorkflowID(wfid):
+        original_handle = DBOS.start_workflow(TestClass.test_workflow)
+    for e in step_events:
+        e.wait()
+        e.clear()
+
+    assert step_counter == 5
+
+    # Recover the workflow, then resume it.
+    recovery_handles = DBOS.recover_pending_workflows()
+    # Wait until the 2nd invocation of the workflows are dequeued and executed
+    for e in step_events:
+        e.wait()
+    event.set()
+
+    # There should be one handle for the workflow and another for each queued step.
+    assert len(recovery_handles) == queued_steps + 1
+    # Verify that both the recovered and original workflows complete correctly.
+    result = [i * multiplier for i in range(5)]
+    for h in recovery_handles:
+        status = h.get_status()
+        # Class name is recorded for class methods
+        assert status.class_name == "TestClass"
+        assert status.config_name == None
+        if h.get_workflow_id() == wfid:
+            assert h.get_result() == result
+    assert original_handle.get_result() == result
+    # Each step should start twice, once originally and once in recovery.
+    assert step_counter == 10
+
+    # Rerun the workflow. Because each step is complete, none should start again.
+    with SetWorkflowID(wfid):
+        assert TestClass.test_workflow() == result
+    assert step_counter == 10
+
+    # Verify all queue entries eventually get cleaned up.
+    assert queue_entries_are_cleaned_up(dbos)
+
+
+def test_inst_txn(dbos: DBOS) -> None:
+    wfid = str(uuid.uuid4())
+
+    @DBOS.dbos_class()
+    class TestClass(DBOSConfiguredInstance):
+
+        def __init__(self, multiplier: int) -> None:
+            self.multiply: Callable[[int], int] = lambda x: x * multiplier
+            super().__init__("test_class")
+
+        @DBOS.transaction()
+        def transaction(self, x: int) -> int:
+            return self.multiply(x)
+
+    input = 2
+    multiplier = 5
+    inst = TestClass(multiplier)
+
+    with SetWorkflowID(wfid):
+        assert inst.transaction(input) == input * multiplier
+    dbos._sys_db.wait_for_buffer_flush()
+    status = DBOS.retrieve_workflow(wfid).get_status()
+    assert status.class_name == "TestClass"
+    assert status.config_name == "test_class"
+
+    handle = DBOS.start_workflow(inst.transaction, input)
+    assert handle.get_result() == input * multiplier
+    dbos._sys_db.wait_for_buffer_flush()
+    status = handle.get_status()
+    assert status.class_name == "TestClass"
+    assert status.config_name == "test_class"
+
+
+def test_mixed_methods(dbos: DBOS) -> None:
+
+    @DBOS.dbos_class()
+    class TestClass(DBOSConfiguredInstance):
+
+        def __init__(self, multiplier: int) -> None:
+            self.multiply: Callable[[int], int] = lambda x: x * multiplier
+            super().__init__("test_class")
+
+        @DBOS.workflow()
+        def instance_workflow(self, x: int) -> int:
+            return self.multiply(x)
+
+        @classmethod
+        @DBOS.workflow()
+        def classmethod_workflow(cls, x: int) -> int:
+            return x
+
+        @staticmethod
+        @DBOS.workflow()
+        def staticmethod_workflow(x: int) -> int:
+            return x
+
+    input = 2
+    multiplier = 5
+    inst = TestClass(multiplier)
+
+    handle = DBOS.start_workflow(inst.instance_workflow, input)
+    assert handle.get_result() == input * multiplier
+    status = handle.get_status()
+    assert status.class_name == "TestClass"
+    assert status.config_name == "test_class"
+
+    handle = DBOS.start_workflow(inst.classmethod_workflow, input)
+    assert handle.get_result() == input
+    status = handle.get_status()
+    assert status.class_name == "TestClass"
+    assert status.config_name == None
+
+    handle = DBOS.start_workflow(inst.staticmethod_workflow, input)
+    assert handle.get_result() == input
+    status = handle.get_status()
+    assert status.class_name == None
+    assert status.config_name == None
{dbos-0.22.0a1 → dbos-0.22.0a2}/tests/test_queue.py

@@ -17,26 +17,9 @@ from dbos import (
     SetWorkflowID,
     WorkflowHandle,
 )
-from dbos._error import DBOSDeadLetterQueueError
 from dbos._schemas.system_database import SystemSchema
 from dbos._sys_db import WorkflowStatusString
-from tests.conftest import default_config
-
-
-def queue_entries_are_cleaned_up(dbos: DBOS) -> bool:
-    max_tries = 10
-    success = False
-    for i in range(max_tries):
-        with dbos._sys_db.engine.begin() as c:
-            query = sa.select(sa.func.count()).select_from(SystemSchema.workflow_queue)
-            row = c.execute(query).fetchone()
-            assert row is not None
-            count = row[0]
-            if count == 0:
-                success = True
-                break
-        time.sleep(1)
-    return success
+from tests.conftest import default_config, queue_entries_are_cleaned_up


 def test_simple_queue(dbos: DBOS) -> None: