dbos 0.19.0a9__tar.gz → 0.20.0a2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbos might be problematic.

Files changed (87)
  1. {dbos-0.19.0a9 → dbos-0.20.0a2}/PKG-INFO +1 -1
  2. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_admin_server.py +45 -2
  3. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_core.py +40 -4
  4. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_dbos.py +19 -0
  5. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_fastapi.py +6 -2
  6. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_flask.py +6 -2
  7. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_workflow_commands.py +4 -5
  8. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/cli.py +46 -6
  9. {dbos-0.19.0a9 → dbos-0.20.0a2}/pyproject.toml +1 -1
  10. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_admin_server.py +119 -1
  11. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_fastapi.py +20 -1
  12. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_flask.py +20 -1
  13. {dbos-0.19.0a9 → dbos-0.20.0a2}/LICENSE +0 -0
  14. {dbos-0.19.0a9 → dbos-0.20.0a2}/README.md +0 -0
  15. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/__init__.py +0 -0
  16. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_app_db.py +0 -0
  17. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_classproperty.py +0 -0
  18. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_cloudutils/authentication.py +0 -0
  19. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_cloudutils/cloudutils.py +0 -0
  20. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_cloudutils/databases.py +0 -0
  21. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_context.py +0 -0
  22. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_croniter.py +0 -0
  23. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_db_wizard.py +0 -0
  24. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_dbos_config.py +0 -0
  25. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_error.py +0 -0
  26. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_kafka.py +0 -0
  27. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_kafka_message.py +0 -0
  28. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_logger.py +0 -0
  29. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_migrations/env.py +0 -0
  30. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_migrations/script.py.mako +0 -0
  31. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  32. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  33. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  34. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  35. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
  36. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
  37. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
  38. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_outcome.py +0 -0
  39. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_queue.py +0 -0
  40. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_recovery.py +0 -0
  41. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_registrations.py +0 -0
  42. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_request.py +0 -0
  43. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_roles.py +0 -0
  44. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_scheduler.py +0 -0
  45. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_schemas/__init__.py +0 -0
  46. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_schemas/application_database.py +0 -0
  47. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_schemas/system_database.py +0 -0
  48. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_serialization.py +0 -0
  49. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_sys_db.py +0 -0
  50. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_templates/hello/README.md +0 -0
  51. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_templates/hello/__package/__init__.py +0 -0
  52. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_templates/hello/__package/main.py +0 -0
  53. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_templates/hello/__package/schema.py +0 -0
  54. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_templates/hello/alembic.ini +0 -0
  55. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_templates/hello/dbos-config.yaml.dbos +0 -0
  56. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_templates/hello/migrations/env.py.dbos +0 -0
  57. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_templates/hello/migrations/script.py.mako +0 -0
  58. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_templates/hello/migrations/versions/2024_07_31_180642_init.py +0 -0
  59. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_templates/hello/start_postgres_docker.py +0 -0
  60. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/_tracer.py +0 -0
  61. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/dbos-config.schema.json +0 -0
  62. {dbos-0.19.0a9 → dbos-0.20.0a2}/dbos/py.typed +0 -0
  63. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/__init__.py +0 -0
  64. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/atexit_no_ctor.py +0 -0
  65. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/atexit_no_launch.py +0 -0
  66. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/classdefs.py +0 -0
  67. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/conftest.py +0 -0
  68. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/more_classdefs.py +0 -0
  69. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/queuedworkflow.py +0 -0
  70. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_async.py +0 -0
  71. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_classdecorators.py +0 -0
  72. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_concurrency.py +0 -0
  73. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_config.py +0 -0
  74. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_croniter.py +0 -0
  75. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_dbos.py +0 -0
  76. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_failures.py +0 -0
  77. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_fastapi_roles.py +0 -0
  78. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_kafka.py +0 -0
  79. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_outcome.py +0 -0
  80. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_package.py +0 -0
  81. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_queue.py +0 -0
  82. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_scheduler.py +0 -0
  83. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_schema_migration.py +0 -0
  84. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_singleton.py +0 -0
  85. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_spans.py +0 -0
  86. {dbos-0.19.0a9 → dbos-0.20.0a2}/tests/test_workflow_cmds.py +0 -0
  87. {dbos-0.19.0a9 → dbos-0.20.0a2}/version/__init__.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 0.19.0a9
+Version: 0.20.0a2
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
dbos/_admin_server.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import json
+import re
 import threading
 from functools import partial
 from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
@@ -15,6 +16,9 @@ if TYPE_CHECKING:
 _health_check_path = "/dbos-healthz"
 _workflow_recovery_path = "/dbos-workflow-recovery"
 _deactivate_path = "/deactivate"
+# /workflows/:workflow_id/cancel
+# /workflows/:workflow_id/resume
+# /workflows/:workflow_id/restart
 
 
 class AdminServer:
@@ -79,12 +83,51 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             self._end_headers()
             self.wfile.write(json.dumps(workflow_ids).encode("utf-8"))
         else:
-            self.send_response(404)
-            self._end_headers()
+
+            restart_match = re.match(
+                r"^/workflows/(?P<workflow_id>[^/]+)/restart$", self.path
+            )
+            resume_match = re.match(
+                r"^/workflows/(?P<workflow_id>[^/]+)/resume$", self.path
+            )
+            cancel_match = re.match(
+                r"^/workflows/(?P<workflow_id>[^/]+)/cancel$", self.path
+            )
+
+            if restart_match:
+                workflow_id = restart_match.group("workflow_id")
+                self._handle_restart(workflow_id)
+            elif resume_match:
+                workflow_id = resume_match.group("workflow_id")
+                self._handle_resume(workflow_id)
+            elif cancel_match:
+                workflow_id = cancel_match.group("workflow_id")
+                self._handle_cancel(workflow_id)
+            else:
+                self.send_response(404)
+                self._end_headers()
 
     def log_message(self, format: str, *args: Any) -> None:
         return  # Disable admin server request logging
 
+    def _handle_restart(self, workflow_id: str) -> None:
+        self.dbos.restart_workflow(workflow_id)
+        print("Restarting workflow", workflow_id)
+        self.send_response(204)
+        self._end_headers()
+
+    def _handle_resume(self, workflow_id: str) -> None:
+        print("Resuming workflow", workflow_id)
+        self.dbos.resume_workflow(workflow_id)
+        self.send_response(204)
+        self._end_headers()
+
+    def _handle_cancel(self, workflow_id: str) -> None:
+        print("Cancelling workflow", workflow_id)
+        self.dbos.cancel_workflow(workflow_id)
+        self.send_response(204)
+        self._end_headers()
+
 
 # Be consistent with DBOS-TS response.
 class PerfUtilization(TypedDict):
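
The new admin routes above accept POST requests of the form /workflows/<workflow_id>/cancel, /resume, and /restart and reply 204 No Content on success. A minimal sketch of calling them from Python, assuming a locally running app whose admin server listens on port 3001 (the port used by the tests and CLI in this diff); the workflow ID is a placeholder:

import requests

workflow_id = "<some-workflow-uuid>"  # placeholder: substitute a real workflow ID
base_url = "http://localhost:3001"    # assumed admin server address

# Each endpoint returns 204 No Content on success (see the handlers above).
for action in ("cancel", "resume", "restart"):
    resp = requests.post(f"{base_url}/workflows/{workflow_id}/{action}", json=[], timeout=5)
    print(action, resp.status_code)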
dbos/_core.py
@@ -266,7 +266,9 @@ def _execute_workflow_wthread(
         raise
 
 
-def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]":
+def execute_workflow_by_id(
+    dbos: "DBOS", workflow_id: str, startNew: bool = False
+) -> "WorkflowHandle[Any]":
     status = dbos._sys_db.get_workflow_status(workflow_id)
     if not status:
         raise DBOSRecoveryError(workflow_id, "Workflow status not found")
@@ -293,7 +295,8 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
                 workflow_id,
                 f"Cannot execute workflow because instance '{iname}' is not registered",
             )
-        with SetWorkflowID(workflow_id):
+
+        if startNew:
             return start_workflow(
                 dbos,
                 wf_func,
@@ -303,6 +306,17 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
                 *inputs["args"],
                 **inputs["kwargs"],
             )
+        else:
+            with SetWorkflowID(workflow_id):
+                return start_workflow(
+                    dbos,
+                    wf_func,
+                    status["queue_name"],
+                    True,
+                    dbos._registry.instance_info_map[iname],
+                    *inputs["args"],
+                    **inputs["kwargs"],
+                )
     elif status["class_name"] is not None:
         class_name = status["class_name"]
         if class_name not in dbos._registry.class_info_map:
@@ -310,7 +324,8 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
                 workflow_id,
                 f"Cannot execute workflow because class '{class_name}' is not registered",
             )
-        with SetWorkflowID(workflow_id):
+
+        if startNew:
             return start_workflow(
                 dbos,
                 wf_func,
@@ -320,8 +335,19 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
                 *inputs["args"],
                 **inputs["kwargs"],
             )
+        else:
+            with SetWorkflowID(workflow_id):
+                return start_workflow(
+                    dbos,
+                    wf_func,
+                    status["queue_name"],
+                    True,
+                    dbos._registry.class_info_map[class_name],
+                    *inputs["args"],
+                    **inputs["kwargs"],
+                )
     else:
-        with SetWorkflowID(workflow_id):
+        if startNew:
             return start_workflow(
                 dbos,
                 wf_func,
@@ -330,6 +356,16 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
                 *inputs["args"],
                 **inputs["kwargs"],
             )
+        else:
+            with SetWorkflowID(workflow_id):
+                return start_workflow(
+                    dbos,
+                    wf_func,
+                    status["queue_name"],
+                    True,
+                    *inputs["args"],
+                    **inputs["kwargs"],
+                )
 
 
 @overload
dbos/_dbos.py
@@ -56,6 +56,7 @@ from ._registrations import (
 )
 from ._roles import default_required_roles, required_roles
 from ._scheduler import ScheduledWorkflow, scheduled
+from ._sys_db import WorkflowStatusString
 from ._tracer import dbos_tracer
 
 if TYPE_CHECKING:
@@ -231,6 +232,7 @@ class DBOS:
                        f"DBOS configured multiple times with conflicting information"
                    )
                config = _dbos_global_registry.config
+
            _dbos_global_instance = super().__new__(cls)
            _dbos_global_instance.__init__(fastapi=fastapi, config=config, flask=flask)  # type: ignore
        else:
@@ -767,6 +769,11 @@
         """Execute a workflow by ID (for recovery)."""
         return execute_workflow_by_id(_get_dbos_instance(), workflow_id)
 
+    @classmethod
+    def restart_workflow(cls, workflow_id: str) -> None:
+        """Execute a workflow by ID (for recovery)."""
+        execute_workflow_by_id(_get_dbos_instance(), workflow_id, True)
+
     @classmethod
     def recover_pending_workflows(
         cls, executor_ids: List[str] = ["local"]
@@ -774,6 +781,18 @@
         """Find all PENDING workflows and execute them."""
         return recover_pending_workflows(_get_dbos_instance(), executor_ids)
 
+    @classmethod
+    def cancel_workflow(cls, workflow_id: str) -> None:
+        """Cancel a workflow by ID."""
+        _get_dbos_instance()._sys_db.set_workflow_status(
+            workflow_id, WorkflowStatusString.CANCELLED, False
+        )
+
+    @classmethod
+    def resume_workflow(cls, workflow_id: str) -> None:
+        """Resume a workflow by ID."""
+        execute_workflow_by_id(_get_dbos_instance(), workflow_id, False)
+
     @classproperty
     def logger(cls) -> Logger:
         """Return the DBOS `Logger` for the current context."""
dbos/_fastapi.py
@@ -94,7 +94,11 @@ def setup_fastapi_middleware(app: FastAPI, dbos: DBOS) -> None:
         with EnterDBOSHandler(attributes):
             ctx = assert_current_dbos_context()
             ctx.request = _make_request(request)
-            workflow_id = request.headers.get("dbos-idempotency-key", "")
-            with SetWorkflowID(workflow_id):
+            workflow_id = request.headers.get("dbos-idempotency-key")
+            if workflow_id is not None:
+                # Set the workflow ID for the handler
+                with SetWorkflowID(workflow_id):
+                    response = await call_next(request)
+            else:
                 response = await call_next(request)
         return response
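
With this change (mirrored in the Flask middleware below), the FastAPI middleware only pins a handler's workflow ID when a dbos-idempotency-key header is actually present, instead of always entering SetWorkflowID with a possibly empty string. A sketch of supplying the header from a test client, assuming `app` is a FastAPI application already wired up with the DBOS middleware and `/endpoint/a/b` is an existing route (both taken from the tests in this diff):

from fastapi.testclient import TestClient

# `app` is assumed to be a FastAPI app with the DBOS middleware installed.
client = TestClient(app)
resp = client.get(
    "/endpoint/a/b",
    headers={"dbos-idempotency-key": "my-fixed-workflow-id"},  # pins the workflow ID
)
# Omitting the header now leaves workflow ID assignment to DBOS itself.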
dbos/_flask.py
@@ -34,8 +34,12 @@ class FlaskMiddleware:
         with EnterDBOSHandler(attributes):
             ctx = assert_current_dbos_context()
             ctx.request = _make_request(request)
-            workflow_id = request.headers.get("dbos-idempotency-key", "")
-            with SetWorkflowID(workflow_id):
+            workflow_id = request.headers.get("dbos-idempotency-key")
+            if workflow_id is not None:
+                # Set the workflow ID for the handler
+                with SetWorkflowID(workflow_id):
+                    response = self.app(environ, start_response)
+            else:
                 response = self.app(environ, start_response)
         return response
 
dbos/_workflow_commands.py
@@ -1,3 +1,6 @@
+import importlib
+import os
+import sys
 from typing import Any, List, Optional, cast
 
 import typer
@@ -6,6 +9,7 @@ from rich import print
 from dbos import DBOS
 
 from . import _serialization, load_config
+from ._core import execute_workflow_by_id
 from ._dbos_config import ConfigFile, _is_valid_app_name
 from ._sys_db import (
     GetWorkflowsInput,
@@ -123,11 +127,6 @@ def _cancel_workflow(config: ConfigFile, uuid: str) -> None:
     sys_db.destroy()
 
 
-def _reattempt_workflow(uuid: str, startNewWorkflow: bool) -> None:
-    print(f"Reattempt workflow info for {uuid} not implemented")
-    return
-
-
 def _get_workflow_info(
     sys_db: SystemDatabase, workflowUUID: str, getRequest: bool
 ) -> Optional[WorkflowInformation]:
dbos/cli.py
@@ -9,6 +9,7 @@ from os import path
 from typing import Any
 
 import jsonpickle  # type: ignore
+import requests
 import sqlalchemy as sa
 import tomlkit
 import typer
@@ -22,12 +23,7 @@ from . import _serialization, load_config
 from ._app_db import ApplicationDatabase
 from ._dbos_config import _is_valid_app_name
 from ._sys_db import SystemDatabase
-from ._workflow_commands import (
-    _cancel_workflow,
-    _get_workflow,
-    _list_workflows,
-    _reattempt_workflow,
-)
+from ._workflow_commands import _cancel_workflow, _get_workflow, _list_workflows
 
 app = typer.Typer()
 workflow = typer.Typer()
@@ -432,5 +428,49 @@ def cancel(
     print(f"Workflow {uuid} has been cancelled")
 
 
+@workflow.command(help="Resume a workflow that has been cancelled")
+def resume(
+    uuid: Annotated[str, typer.Argument()],
+    host: Annotated[
+        typing.Optional[str],
+        typer.Option("--host", "-h", help="Specify the admin host"),
+    ] = "localhost",
+    port: Annotated[
+        typing.Optional[int],
+        typer.Option("--port", "-p", help="Specify the admin port"),
+    ] = 3001,
+) -> None:
+    response = requests.post(
+        f"http://{host}:{port}/workflows/{uuid}/resume", json=[], timeout=5
+    )
+
+    if response.status_code == 200:
+        print(f"Workflow {uuid} has been resumed")
+    else:
+        print(f"Failed to resume workflow {uuid}. Status code: {response.status_code}")
+
+
+@workflow.command(help="Restart a workflow from the beginning with a new id")
+def restart(
+    uuid: Annotated[str, typer.Argument()],
+    host: Annotated[
+        typing.Optional[str],
+        typer.Option("--host", "-h", help="Specify the admin host"),
+    ] = "localhost",
+    port: Annotated[
+        typing.Optional[int],
+        typer.Option("--port", "-p", help="Specify the admin port"),
+    ] = 3001,
+) -> None:
+    response = requests.post(
+        f"http://{host}:{port}/workflows/{uuid}/restart", json=[], timeout=5
+    )
+
+    if response.status_code == 200:
+        print(f"Workflow {uuid} has been restarted")
+    else:
+        print(f"Failed to resume workflow {uuid}. Status code: {response.status_code}")
+
+
 if __name__ == "__main__":
     app()
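
The new resume and restart commands are thin HTTP clients for the admin endpoints added in _admin_server.py. A sketch of driving them programmatically with Typer's test runner, assuming the workflow sub-app is registered on the main app under the name "workflow" (as the existing cancel command suggests) and that the target application's admin server is reachable on localhost:3001:

from typer.testing import CliRunner
from dbos.cli import app

runner = CliRunner()
# Placeholder UUID; the command POSTs to the admin server, so a running app is assumed.
result = runner.invoke(app, ["workflow", "resume", "<some-workflow-uuid>", "--port", "3001"])
print(result.output)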
pyproject.toml
@@ -27,7 +27,7 @@ dependencies = [
 ]
 requires-python = ">=3.9"
 readme = "README.md"
-version = "0.19.0a9"
+version = "0.20.0a2"
 
 [project.license]
 text = "MIT"
tests/test_admin_server.py
@@ -5,7 +5,7 @@ import uuid
 import requests
 
 # Public API
-from dbos import DBOS, SetWorkflowID
+from dbos import DBOS, ConfigFile, SetWorkflowID, _workflow_commands
 
 
 def test_admin_endpoints(dbos: DBOS) -> None:
@@ -148,3 +148,121 @@ runtimeConfig:
     # Clean up after the test
     DBOS.destroy()
     os.remove("dbos-config.yaml")
+
+
+def test_admin_workflow_resume(dbos: DBOS, config: ConfigFile) -> None:
+
+    @DBOS.workflow()
+    def simple_workflow() -> None:
+        print("Executed Simple workflow")
+        return
+
+    # run the workflow
+    simple_workflow()
+    time.sleep(1)
+
+    # get the workflow list
+    output = _workflow_commands._list_workflows(
+        config, 10, None, None, None, None, False, None
+    )
+    assert len(output) == 1, f"Expected list length to be 1, but got {len(output)}"
+
+    assert output[0] != None, "Expected output to be not None"
+
+    wfUuid = output[0].workflowUUID
+
+    info = _workflow_commands._get_workflow(config, wfUuid, True)
+    assert info is not None, "Expected output to be not None"
+
+    assert info.status == "SUCCESS", f"Expected status to be SUCCESS"
+
+    response = requests.post(
+        f"http://localhost:3001/workflows/{wfUuid}/cancel", json=[], timeout=5
+    )
+    assert response.status_code == 204
+
+    info = _workflow_commands._get_workflow(config, wfUuid, True)
+    if info is not None:
+        assert info.status == "CANCELLED", f"Expected status to be CANCELLED"
+    else:
+        assert False, "Expected info to be not None"
+
+    response = requests.post(
+        f"http://localhost:3001/workflows/{wfUuid}/resume", json=[], timeout=5
+    )
+    assert response.status_code == 204
+
+    time.sleep(1)
+
+    info = _workflow_commands._get_workflow(config, wfUuid, True)
+    if info is not None:
+        assert info.status == "SUCCESS", f"Expected status to be SUCCESS"
+    else:
+        assert False, "Expected info to be not None"
+
+
+def test_admin_workflow_restart(dbos: DBOS, config: ConfigFile) -> None:
+
+    @DBOS.workflow()
+    def simple_workflow() -> None:
+        print("Executed Simple workflow")
+        return
+
+    # run the workflow
+    simple_workflow()
+    time.sleep(1)
+
+    # get the workflow list
+    output = _workflow_commands._list_workflows(
+        config, 10, None, None, None, None, False, None
+    )
+    assert len(output) == 1, f"Expected list length to be 1, but got {len(output)}"
+
+    assert output[0] != None, "Expected output to be not None"
+
+    wfUuid = output[0].workflowUUID
+
+    info = _workflow_commands._get_workflow(config, wfUuid, True)
+    assert info is not None, "Expected output to be not None"
+
+    assert info.status == "SUCCESS", f"Expected status to be SUCCESS"
+
+    response = requests.post(
+        f"http://localhost:3001/workflows/{wfUuid}/cancel", json=[], timeout=5
+    )
+    assert response.status_code == 204
+
+    info = _workflow_commands._get_workflow(config, wfUuid, True)
+    if info is not None:
+        assert info.status == "CANCELLED", f"Expected status to be CANCELLED"
+    else:
+        assert False, "Expected info to be not None"
+
+    response = requests.post(
+        f"http://localhost:3001/workflows/{wfUuid}/restart", json=[], timeout=5
+    )
+    assert response.status_code == 204
+
+    time.sleep(1)
+
+    info = _workflow_commands._get_workflow(config, wfUuid, True)
+    if info is not None:
+        assert info.status == "CANCELLED", f"Expected status to be CANCELLED"
+    else:
+        assert False, "Expected info to be not None"
+
+    output = _workflow_commands._list_workflows(
+        config, 10, None, None, None, None, False, None
+    )
+    assert len(output) == 2, f"Expected list length to be 2, but got {len(output)}"
+
+    if output[0].workflowUUID == wfUuid:
+        new_wfUuid = output[1].workflowUUID
+    else:
+        new_wfUuid = output[0].workflowUUID
+
+    info = _workflow_commands._get_workflow(config, new_wfUuid, True)
+    if info is not None:
+        assert info.status == "SUCCESS", f"Expected status to be SUCCESS"
+    else:
+        assert False, "Expected info to be not None"
tests/test_fastapi.py
@@ -1,6 +1,8 @@
+import logging
 import uuid
 from typing import Tuple
 
+import pytest
 import sqlalchemy as sa
 from fastapi import FastAPI
 from fastapi.testclient import TestClient
@@ -12,7 +14,9 @@ from dbos import DBOS
 from dbos._context import assert_current_dbos_context
 
 
-def test_simple_endpoint(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
+def test_simple_endpoint(
+    dbos_fastapi: Tuple[DBOS, FastAPI], caplog: pytest.LogCaptureFixture
+) -> None:
     dbos, app = dbos_fastapi
     client = TestClient(app)
@@ -32,6 +36,7 @@ def test_simple_endpoint(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
         res2 = test_step(var2)
         return res1 + res2
 
+    @app.get("/transaction/{var}")
     @DBOS.transaction()
     def test_transaction(var: str) -> str:
         rows = DBOS.sql_session.execute(sa.text("SELECT 1")).fetchall()
@@ -41,13 +46,27 @@ def test_simple_endpoint(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
     def test_step(var: str) -> str:
         return var
 
+    original_propagate = logging.getLogger("dbos").propagate
+    caplog.set_level(logging.WARNING, "dbos")
+    logging.getLogger("dbos").propagate = True
+
     response = client.get("/workflow/bob/bob")
     assert response.status_code == 200
     assert response.text == '"bob1bob"'
+    assert caplog.text == ""
 
     response = client.get("/endpoint/bob/bob")
     assert response.status_code == 200
     assert response.text == '"bob1bob"'
+    assert caplog.text == ""
+
+    response = client.get("/transaction/bob")
+    assert response.status_code == 200
+    assert response.text == '"bob1"'
+    assert caplog.text == ""
+
+    # Reset logging
+    logging.getLogger("dbos").propagate = original_propagate
 
 
 def test_start_workflow(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
tests/test_flask.py
@@ -1,6 +1,8 @@
+import logging
 import uuid
 from typing import Tuple
 
+import pytest
 import sqlalchemy as sa
 from flask import Flask, Response, jsonify
 
@@ -8,7 +10,9 @@ from dbos import DBOS
 from dbos._context import assert_current_dbos_context
 
 
-def test_flask_endpoint(dbos_flask: Tuple[DBOS, Flask]) -> None:
+def test_flask_endpoint(
+    dbos_flask: Tuple[DBOS, Flask], caplog: pytest.LogCaptureFixture
+) -> None:
     _, app = dbos_flask
 
     @app.route("/endpoint/<var1>/<var2>")
@@ -27,6 +31,7 @@ def test_flask_endpoint(dbos_flask: Tuple[DBOS, Flask]) -> None:
         result = res1 + res2
         return jsonify({"result": result})
 
+    @app.route("/transaction/<var>")
     @DBOS.transaction()
     def test_transaction(var: str) -> str:
         rows = DBOS.sql_session.execute(sa.text("SELECT 1")).fetchall()
@@ -39,13 +44,27 @@ def test_flask_endpoint(dbos_flask: Tuple[DBOS, Flask]) -> None:
     app.config["TESTING"] = True
     client = app.test_client()
 
+    original_propagate = logging.getLogger("dbos").propagate
+    caplog.set_level(logging.WARNING, "dbos")
+    logging.getLogger("dbos").propagate = True
+
     response = client.get("/endpoint/a/b")
     assert response.status_code == 200
    assert response.json == {"result": "a1b"}
+    assert caplog.text == ""
 
     response = client.get("/workflow/a/b")
     assert response.status_code == 200
     assert response.json == {"result": "a1b"}
+    assert caplog.text == ""
+
+    response = client.get("/transaction/bob")
+    assert response.status_code == 200
+    assert response.text == "bob1"
+    assert caplog.text == ""
+
+    # Reset logging
+    logging.getLogger("dbos").propagate = original_propagate
 
 
 def test_endpoint_recovery(dbos_flask: Tuple[DBOS, Flask]) -> None: