dbos 0.23.0a3__tar.gz → 0.23.0a5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbos might be problematic.

Files changed (94)
  1. {dbos-0.23.0a3 → dbos-0.23.0a5}/PKG-INFO +1 -1
  2. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_core.py +2 -0
  3. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_sys_db.py +39 -82
  4. dbos-0.23.0a5/dbos/_workflow_commands.py +148 -0
  5. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/cli/cli.py +41 -20
  6. {dbos-0.23.0a3 → dbos-0.23.0a5}/pyproject.toml +1 -1
  7. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/conftest.py +8 -0
  8. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_admin_server.py +24 -25
  9. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_dbos.py +7 -5
  10. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_fastapi.py +2 -0
  11. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_flask.py +2 -0
  12. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_scheduler.py +2 -0
  13. dbos-0.23.0a5/tests/test_workflow_cmds.py +299 -0
  14. dbos-0.23.0a3/dbos/_workflow_commands.py +0 -183
  15. dbos-0.23.0a3/tests/test_workflow_cmds.py +0 -289
  16. {dbos-0.23.0a3 → dbos-0.23.0a5}/LICENSE +0 -0
  17. {dbos-0.23.0a3 → dbos-0.23.0a5}/README.md +0 -0
  18. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/__init__.py +0 -0
  19. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_admin_server.py +0 -0
  20. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_app_db.py +0 -0
  21. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_classproperty.py +0 -0
  22. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_cloudutils/authentication.py +0 -0
  23. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_cloudutils/cloudutils.py +0 -0
  24. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_cloudutils/databases.py +0 -0
  25. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_context.py +0 -0
  26. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_croniter.py +0 -0
  27. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_db_wizard.py +0 -0
  28. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_dbos.py +0 -0
  29. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_dbos_config.py +0 -0
  30. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_error.py +0 -0
  31. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_fastapi.py +0 -0
  32. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_flask.py +0 -0
  33. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_kafka.py +0 -0
  34. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_kafka_message.py +0 -0
  35. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_logger.py +0 -0
  36. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_migrations/env.py +0 -0
  37. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_migrations/script.py.mako +0 -0
  38. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  39. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  40. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
  41. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
  42. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
  43. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
  44. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
  45. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_outcome.py +0 -0
  46. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_queue.py +0 -0
  47. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_recovery.py +0 -0
  48. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_registrations.py +0 -0
  49. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_request.py +0 -0
  50. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_roles.py +0 -0
  51. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_scheduler.py +0 -0
  52. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_schemas/__init__.py +0 -0
  53. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_schemas/application_database.py +0 -0
  54. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_schemas/system_database.py +0 -0
  55. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_serialization.py +0 -0
  56. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_templates/dbos-db-starter/README.md +0 -0
  57. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  58. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_templates/dbos-db-starter/__package/main.py +0 -0
  59. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  60. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
  61. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
  62. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
  63. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
  64. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
  65. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  66. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_tracer.py +0 -0
  67. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/_utils.py +0 -0
  68. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/cli/_github_init.py +0 -0
  69. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/cli/_template_init.py +0 -0
  70. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/dbos-config.schema.json +0 -0
  71. {dbos-0.23.0a3 → dbos-0.23.0a5}/dbos/py.typed +0 -0
  72. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/__init__.py +0 -0
  73. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/atexit_no_ctor.py +0 -0
  74. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/atexit_no_launch.py +0 -0
  75. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/classdefs.py +0 -0
  76. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/more_classdefs.py +0 -0
  77. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/queuedworkflow.py +0 -0
  78. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_async.py +0 -0
  79. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_classdecorators.py +0 -0
  80. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_concurrency.py +0 -0
  81. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_config.py +0 -0
  82. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_croniter.py +0 -0
  83. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_failures.py +0 -0
  84. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_fastapi_roles.py +0 -0
  85. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_kafka.py +0 -0
  86. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_outcome.py +0 -0
  87. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_package.py +0 -0
  88. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_queue.py +0 -0
  89. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_schema_migration.py +0 -0
  90. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_singleton.py +0 -0
  91. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_spans.py +0 -0
  92. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_sqlalchemy.py +0 -0
  93. {dbos-0.23.0a3 → dbos-0.23.0a5}/tests/test_workflow_cancel.py +0 -0
  94. {dbos-0.23.0a3 → dbos-0.23.0a5}/version/__init__.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dbos
- Version: 0.23.0a3
+ Version: 0.23.0a5
  Summary: Ultra-lightweight durable execution in Python
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
  License: MIT
dbos/_core.py
@@ -177,6 +177,8 @@ def _init_workflow(
  ),
  "assumed_role": ctx.assumed_role,
  "queue_name": queue,
+ "created_at": None,
+ "updated_at": None,
  }

  # If we have a class name, the first arg is the instance and do not serialize
dbos/_sys_db.py
@@ -68,17 +68,19 @@ class WorkflowStatusInternal(TypedDict):
  name: str
  class_name: Optional[str]
  config_name: Optional[str]
+ authenticated_user: Optional[str]
+ assumed_role: Optional[str]
+ authenticated_roles: Optional[str] # JSON list of roles
  output: Optional[str] # JSON (jsonpickle)
+ request: Optional[str] # JSON (jsonpickle)
  error: Optional[str] # JSON (jsonpickle)
+ created_at: Optional[int] # Unix epoch timestamp in ms
+ updated_at: Optional[int] # Unix epoch timestamp in ms
+ queue_name: Optional[str]
  executor_id: Optional[str]
  app_version: Optional[str]
  app_id: Optional[str]
- request: Optional[str] # JSON (jsonpickle)
  recovery_attempts: Optional[int]
- authenticated_user: Optional[str]
- assumed_role: Optional[str]
- authenticated_roles: Optional[str] # JSON list of roles.
- queue_name: Optional[str]


  class RecordedResult(TypedDict):
@@ -104,19 +106,12 @@ class GetWorkflowsInput:
  Structure for argument to `get_workflows` function.

  This specifies the search criteria for workflow retrieval by `get_workflows`.
-
- Attributes:
- name(str): The name of the workflow function
- authenticated_user(str): The name of the user who invoked the function
- start_time(str): Beginning of search range for time of invocation, in ISO 8601 format
- end_time(str): End of search range for time of invocation, in ISO 8601 format
- status(str): Current status of the workflow invocation (see `WorkflowStatusString`)
- application_version(str): Application version that invoked the workflow
- limit(int): Limit on number of returned records
-
  """

  def __init__(self) -> None:
+ self.workflow_ids: Optional[List[str]] = (
+ None  # Search only in these workflow IDs
+ )
  self.name: Optional[str] = None  # The name of the workflow function
  self.authenticated_user: Optional[str] = None  # The user who ran the workflow.
  self.start_time: Optional[str] = None  # Timestamp in ISO 8601 format
@@ -128,14 +123,18 @@ class GetWorkflowsInput:
  self.limit: Optional[int] = (
  None  # Return up to this many workflows IDs. IDs are ordered by workflow creation time.
  )
+ self.offset: Optional[int] = (
+ None  # Offset into the matching records for pagination
+ )


  class GetQueuedWorkflowsInput(TypedDict):
- queue_name: Optional[str]
- status: Optional[str]
+ queue_name: Optional[str]  # Get workflows belonging to this queue
+ status: Optional[str]  # Get workflows with this status
  start_time: Optional[str]  # Timestamp in ISO 8601 format
  end_time: Optional[str]  # Timestamp in ISO 8601 format
  limit: Optional[int]  # Return up to this many workflows IDs.
+ offset: Optional[int]  # Offset into the matching records for pagination
  name: Optional[str]  # The name of the workflow function


@@ -489,27 +488,33 @@ class SystemDatabase:
  SystemSchema.workflow_status.c.assumed_role,
  SystemSchema.workflow_status.c.queue_name,
  SystemSchema.workflow_status.c.executor_id,
+ SystemSchema.workflow_status.c.created_at,
+ SystemSchema.workflow_status.c.updated_at,
+ SystemSchema.workflow_status.c.application_version,
+ SystemSchema.workflow_status.c.application_id,
  ).where(SystemSchema.workflow_status.c.workflow_uuid == workflow_uuid)
  ).fetchone()
  if row is None:
  return None
  status: WorkflowStatusInternal = {
  "workflow_uuid": workflow_uuid,
- "status": row[0],
- "name": row[1],
- "class_name": row[5],
- "config_name": row[4],
  "output": None,
  "error": None,
- "app_id": None,
- "app_version": None,
- "executor_id": row[10],
+ "status": row[0],
+ "name": row[1],
  "request": row[2],
  "recovery_attempts": row[3],
+ "config_name": row[4],
+ "class_name": row[5],
  "authenticated_user": row[6],
  "authenticated_roles": row[7],
  "assumed_role": row[8],
  "queue_name": row[9],
+ "executor_id": row[10],
+ "created_at": row[11],
+ "updated_at": row[12],
+ "app_version": row[13],
+ "app_id": row[14],
  }
  return status

@@ -538,47 +543,6 @@
  )
  return stat

- def get_workflow_status_w_outputs(
- self, workflow_uuid: str
- ) -> Optional[WorkflowStatusInternal]:
- with self.engine.begin() as c:
- row = c.execute(
- sa.select(
- SystemSchema.workflow_status.c.status,
- SystemSchema.workflow_status.c.name,
- SystemSchema.workflow_status.c.request,
- SystemSchema.workflow_status.c.output,
- SystemSchema.workflow_status.c.error,
- SystemSchema.workflow_status.c.config_name,
- SystemSchema.workflow_status.c.class_name,
- SystemSchema.workflow_status.c.authenticated_user,
- SystemSchema.workflow_status.c.authenticated_roles,
- SystemSchema.workflow_status.c.assumed_role,
- SystemSchema.workflow_status.c.queue_name,
- ).where(SystemSchema.workflow_status.c.workflow_uuid == workflow_uuid)
- ).fetchone()
- if row is None:
- return None
- status: WorkflowStatusInternal = {
- "workflow_uuid": workflow_uuid,
- "status": row[0],
- "name": row[1],
- "config_name": row[5],
- "class_name": row[6],
- "output": row[3],
- "error": row[4],
- "app_id": None,
- "app_version": None,
- "executor_id": None,
- "request": row[2],
- "recovery_attempts": None,
- "authenticated_user": row[7],
- "authenticated_roles": row[8],
- "assumed_role": row[9],
- "queue_name": row[10],
- }
- return status
-
  def await_workflow_result_internal(self, workflow_uuid: str) -> dict[str, Any]:
  polling_interval_secs: float = 1.000

@@ -625,21 +589,6 @@
  raise _serialization.deserialize_exception(stat["error"])
  return None

- def get_workflow_info(
- self, workflow_uuid: str, get_request: bool
- ) -> Optional[WorkflowInformation]:
- stat = self.get_workflow_status_w_outputs(workflow_uuid)
- if stat is None:
- return None
- info = cast(WorkflowInformation, stat)
- input = self.get_workflow_inputs(workflow_uuid)
- if input is not None:
- info["input"] = input
- if not get_request:
- info.pop("request", None)
-
- return info
-
  def update_workflow_inputs(
  self, workflow_uuid: str, inputs: str, conn: Optional[sa.Connection] = None
  ) -> None:
@@ -715,14 +664,20 @@
  SystemSchema.workflow_status.c.application_version
  == input.application_version
  )
+ if input.workflow_ids:
+ query = query.where(
+ SystemSchema.workflow_status.c.workflow_uuid.in_(input.workflow_ids)
+ )
  if input.limit:
  query = query.limit(input.limit)
+ if input.offset:
+ query = query.offset(input.offset)

  with self.engine.begin() as c:
  rows = c.execute(query)
- workflow_uuids = [row[0] for row in rows]
+ workflow_ids = [row[0] for row in rows]

- return GetWorkflowsOutput(workflow_uuids)
+ return GetWorkflowsOutput(workflow_ids)

  def get_queued_workflows(
  self, input: GetQueuedWorkflowsInput
@@ -763,6 +718,8 @@
  )
  if input.get("limit"):
  query = query.limit(input["limit"])
+ if input.get("offset"):
+ query = query.offset(input["offset"])

  with self.engine.begin() as c:
  rows = c.execute(query)
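
The `get_workflows` and `get_queued_workflows` queries above gain `offset` alongside the existing `limit`, plus an optional `workflow_ids` filter. A minimal sketch of paging with the new fields directly against the system database (not part of this release's code; it assumes a local dbos-config.yaml and a reachable system database):

from dbos import load_config
from dbos._sys_db import GetWorkflowsInput, SystemDatabase

config = load_config(silent=True)
sys_db = SystemDatabase(config)
try:
    page_size = 100
    offset = 0
    while True:
        query = GetWorkflowsInput()
        query.limit = page_size   # cap each page
        query.offset = offset     # skip the rows already seen
        page = sys_db.get_workflows(query)
        if not page.workflow_uuids:
            break
        for workflow_id in page.workflow_uuids:
            print(workflow_id)
        offset += page_size
finally:
    sys_db.destroy()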
dbos-0.23.0a5/dbos/_workflow_commands.py (new file)
@@ -0,0 +1,148 @@
+ from typing import List, Optional, cast
+
+ import typer
+
+ from . import _serialization
+ from ._dbos_config import ConfigFile
+ from ._logger import dbos_logger
+ from ._sys_db import (
+     GetQueuedWorkflowsInput,
+     GetWorkflowsInput,
+     GetWorkflowsOutput,
+     SystemDatabase,
+     WorkflowStatuses,
+ )
+
+
+ class WorkflowInformation:
+     workflow_id: str
+     status: WorkflowStatuses
+     workflow_name: str
+     workflow_class_name: Optional[str]
+     workflow_config_name: Optional[str]
+     authenticated_user: Optional[str]
+     assumed_role: Optional[str]
+     authenticated_roles: Optional[str] # JSON list of roles.
+     input: Optional[_serialization.WorkflowInputs] # JSON (jsonpickle)
+     output: Optional[str] = None # JSON (jsonpickle)
+     request: Optional[str] # JSON (jsonpickle)
+     error: Optional[str] = None # JSON (jsonpickle)
+     created_at: Optional[int] # Unix epoch timestamp in ms
+     updated_at: Optional[int] # Unix epoch timestamp in ms
+     queue_name: Optional[str]
+     executor_id: Optional[str]
+     app_version: Optional[str]
+     app_id: Optional[str]
+     recovery_attempts: Optional[int]
+
+
+ def list_workflows(
+     sys_db: SystemDatabase,
+     *,
+     workflow_ids: Optional[List[str]] = None,
+     user: Optional[str] = None,
+     start_time: Optional[str] = None,
+     end_time: Optional[str] = None,
+     status: Optional[str] = None,
+     request: bool = False,
+     app_version: Optional[str] = None,
+     name: Optional[str] = None,
+     limit: Optional[int] = None,
+     offset: Optional[int] = None,
+ ) -> List[WorkflowInformation]:
+     input = GetWorkflowsInput()
+     input.workflow_ids = workflow_ids
+     input.authenticated_user = user
+     input.start_time = start_time
+     input.end_time = end_time
+     if status is not None:
+         input.status = cast(WorkflowStatuses, status)
+     input.application_version = app_version
+     input.limit = limit
+     input.name = name
+     input.offset = offset
+
+     output: GetWorkflowsOutput = sys_db.get_workflows(input)
+     infos: List[WorkflowInformation] = []
+     for workflow_id in output.workflow_uuids:
+         info = get_workflow(sys_db, workflow_id, request) # Call the method for each ID
+         if info is not None:
+             infos.append(info)
+     return infos
+
+
+ def list_queued_workflows(
+     sys_db: SystemDatabase,
+     *,
+     limit: Optional[int] = None,
+     start_time: Optional[str] = None,
+     end_time: Optional[str] = None,
+     queue_name: Optional[str] = None,
+     status: Optional[str] = None,
+     name: Optional[str] = None,
+     request: bool = False,
+     offset: Optional[int] = None,
+ ) -> List[WorkflowInformation]:
+     input: GetQueuedWorkflowsInput = {
+         "queue_name": queue_name,
+         "start_time": start_time,
+         "end_time": end_time,
+         "status": status,
+         "limit": limit,
+         "name": name,
+         "offset": offset,
+     }
+     output: GetWorkflowsOutput = sys_db.get_queued_workflows(input)
+     infos: List[WorkflowInformation] = []
+     for workflow_id in output.workflow_uuids:
+         info = get_workflow(sys_db, workflow_id, request) # Call the method for each ID
+         if info is not None:
+             infos.append(info)
+     return infos
+
+
+ def get_workflow(
+     sys_db: SystemDatabase, workflowUUID: str, getRequest: bool
+ ) -> Optional[WorkflowInformation]:
+
+     info = sys_db.get_workflow_status(workflowUUID)
+     if info is None:
+         return None
+
+     winfo = WorkflowInformation()
+
+     winfo.workflow_id = workflowUUID
+     winfo.status = info["status"]
+     winfo.workflow_name = info["name"]
+     winfo.workflow_class_name = info["class_name"]
+     winfo.workflow_config_name = info["config_name"]
+     winfo.authenticated_user = info["authenticated_user"]
+     winfo.assumed_role = info["assumed_role"]
+     winfo.authenticated_roles = info["authenticated_roles"]
+     winfo.request = info["request"]
+     winfo.created_at = info["created_at"]
+     winfo.updated_at = info["updated_at"]
+     winfo.queue_name = info["queue_name"]
+     winfo.executor_id = info["executor_id"]
+     winfo.app_version = info["app_version"]
+     winfo.app_id = info["app_id"]
+     winfo.recovery_attempts = info["recovery_attempts"]
+
+     # no input field
+     input_data = sys_db.get_workflow_inputs(workflowUUID)
+     if input_data is not None:
+         winfo.input = input_data
+
+     if info.get("status") == "SUCCESS":
+         result = sys_db.await_workflow_result(workflowUUID)
+         winfo.output = result
+     elif info.get("status") == "ERROR":
+         try:
+             sys_db.await_workflow_result(workflowUUID)
+         except Exception as e:
+             winfo.error = str(e)
+
+     if not getRequest:
+         winfo.request = None
+
+     return winfo
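
The rewritten dbos/_workflow_commands.py takes a SystemDatabase handle instead of a ConfigFile, and every filter on list_workflows and list_queued_workflows is keyword-only. A minimal sketch of calling the new API from a script (not part of the diff; it assumes a dbos-config.yaml and a reachable system database):

from dbos import load_config
from dbos._sys_db import SystemDatabase
from dbos._workflow_commands import get_workflow, list_workflows

config = load_config(silent=True)
sys_db = SystemDatabase(config)
try:
    # Keyword-only filters; request=False leaves the serialized request out.
    workflows = list_workflows(sys_db, limit=10, status="SUCCESS", request=False)
    for wf in workflows:
        print(wf.workflow_id, wf.workflow_name, wf.status)
    if workflows:
        # Fetch a single record, this time including its request payload.
        info = get_workflow(sys_db, workflows[0].workflow_id, True)
        if info is not None:
            print(info.created_at, info.updated_at)
finally:
    sys_db.destroy()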
dbos/cli/cli.py
@@ -19,12 +19,7 @@ from .. import load_config
  from .._app_db import ApplicationDatabase
  from .._dbos_config import _is_valid_app_name
  from .._sys_db import SystemDatabase, reset_system_database
- from .._workflow_commands import (
- cancel_workflow,
- get_workflow,
- list_queued_workflows,
- list_workflows,
- )
+ from .._workflow_commands import get_workflow, list_queued_workflows, list_workflows
  from ..cli._github_init import create_template_from_github
  from ._template_init import copy_template, get_project_name, get_templates_directory

@@ -290,25 +285,37 @@ def list(
  request: Annotated[
  bool,
  typer.Option("--request", help="Retrieve workflow request information"),
- ] = True,
+ ] = False,
  ) -> None:
  config = load_config(silent=True)
+ sys_db = SystemDatabase(config)
  workflows = list_workflows(
- config, limit, user, starttime, endtime, status, request, appversion, name
+ sys_db,
+ limit=limit,
+ user=user,
+ start_time=starttime,
+ end_time=endtime,
+ status=status,
+ request=request,
+ app_version=appversion,
+ name=name,
  )
  print(jsonpickle.encode(workflows, unpicklable=False))


  @workflow.command(help="Retrieve the status of a workflow")
  def get(
- uuid: Annotated[str, typer.Argument()],
+ workflow_id: Annotated[str, typer.Argument()],
  request: Annotated[
  bool,
  typer.Option("--request", help="Retrieve workflow request information"),
- ] = True,
+ ] = False,
  ) -> None:
  config = load_config(silent=True)
- print(jsonpickle.encode(get_workflow(config, uuid, request), unpicklable=False))
+ sys_db = SystemDatabase(config)
+ print(
+ jsonpickle.encode(get_workflow(sys_db, workflow_id, request), unpicklable=False)
+ )


  @workflow.command(
@@ -316,10 +323,23 @@ def get(
  )
  def cancel(
  uuid: Annotated[str, typer.Argument()],
+ host: Annotated[
+ typing.Optional[str],
+ typer.Option("--host", "-H", help="Specify the admin host"),
+ ] = "localhost",
+ port: Annotated[
+ typing.Optional[int],
+ typer.Option("--port", "-p", help="Specify the admin port"),
+ ] = 3001,
  ) -> None:
- config = load_config()
- cancel_workflow(config, uuid)
- print(f"Workflow {uuid} has been cancelled")
+ response = requests.post(
+ f"http://{host}:{port}/workflows/{uuid}/cancel", json=[], timeout=5
+ )
+
+ if response.status_code == 204:
+ print(f"Workflow {uuid} has been cancelled")
+ else:
+ print(f"Failed to cancel workflow {uuid}. Status code: {response.status_code}")


  @workflow.command(help="Resume a workflow that has been cancelled")
@@ -327,7 +347,7 @@ def resume(
  uuid: Annotated[str, typer.Argument()],
  host: Annotated[
  typing.Optional[str],
- typer.Option("--host", "-h", help="Specify the admin host"),
+ typer.Option("--host", "-H", help="Specify the admin host"),
  ] = "localhost",
  port: Annotated[
  typing.Optional[int],
@@ -338,7 +358,7 @@
  f"http://{host}:{port}/workflows/{uuid}/resume", json=[], timeout=5
  )

- if response.status_code == 200:
+ if response.status_code == 204:
  print(f"Workflow {uuid} has been resumed")
  else:
  print(f"Failed to resume workflow {uuid}. Status code: {response.status_code}")
@@ -349,7 +369,7 @@ def restart(
  uuid: Annotated[str, typer.Argument()],
  host: Annotated[
  typing.Optional[str],
- typer.Option("--host", "-h", help="Specify the admin host"),
+ typer.Option("--host", "-H", help="Specify the admin host"),
  ] = "localhost",
  port: Annotated[
  typing.Optional[int],
@@ -360,7 +380,7 @@
  f"http://{host}:{port}/workflows/{uuid}/restart", json=[], timeout=5
  )

- if response.status_code == 200:
+ if response.status_code == 204:
  print(f"Workflow {uuid} has been restarted")
  else:
  print(f"Failed to resume workflow {uuid}. Status code: {response.status_code}")
@@ -415,11 +435,12 @@ def list_queue(
  request: Annotated[
  bool,
  typer.Option("--request", help="Retrieve workflow request information"),
- ] = True,
+ ] = False,
  ) -> None:
  config = load_config(silent=True)
+ sys_db = SystemDatabase(config)
  workflows = list_queued_workflows(
- config=config,
+ sys_db=sys_db,
  limit=limit,
  start_time=start_time,
  end_time=end_time,
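
The cancel, resume, and restart subcommands now call the admin server over HTTP and treat a 204 response as success (cancel no longer goes through the removed cancel_workflow helper). A minimal sketch of hitting those endpoints directly (not part of the diff; it assumes the application's admin server is listening on localhost:3001 and that workflow_id names a real workflow):

import requests

workflow_id = "example-workflow-id"  # hypothetical ID, replace with a real one
base_url = "http://localhost:3001/workflows"

# Cancel the workflow, then resume it; both endpoints answer 204 on success.
for action in ("cancel", "resume"):
    response = requests.post(f"{base_url}/{workflow_id}/{action}", json=[], timeout=5)
    if response.status_code == 204:
        print(f"{action} succeeded for {workflow_id}")
    else:
        print(f"{action} failed for {workflow_id}: {response.status_code}")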
pyproject.toml
@@ -27,7 +27,7 @@ dependencies = [
  ]
  requires-python = ">=3.9"
  readme = "README.md"
- version = "0.23.0a3"
+ version = "0.23.0a5"

  [project.license]
  text = "MIT"
tests/conftest.py
@@ -11,6 +11,7 @@ from flask import Flask

  from dbos import DBOS, ConfigFile
  from dbos._schemas.system_database import SystemSchema
+ from dbos._sys_db import SystemDatabase


  @pytest.fixture(scope="session")
@@ -45,6 +46,13 @@ def config() -> ConfigFile:
  return default_config()


+ @pytest.fixture()
+ def sys_db(config: ConfigFile) -> Generator[SystemDatabase, Any, None]:
+     sys_db = SystemDatabase(config)
+     yield sys_db
+     sys_db.destroy()
+
+
  @pytest.fixture(scope="session")
  def postgres_db_engine() -> sa.Engine:
  cfg = default_config()
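
The new sys_db fixture opens a SystemDatabase from the test config and destroys it on teardown, so tests can pass it straight to the _workflow_commands helpers. A minimal sketch of a test consuming it alongside the existing dbos fixture (not part of the diff; the test name and workflow are hypothetical):

from dbos import DBOS, _workflow_commands
from dbos._sys_db import SystemDatabase


def test_list_workflows_with_sys_db(dbos: DBOS, sys_db: SystemDatabase) -> None:
    @DBOS.workflow()
    def noop_workflow() -> None:
        return None

    noop_workflow()
    dbos._sys_db.wait_for_buffer_flush()  # make sure the status row is flushed
    output = _workflow_commands.list_workflows(sys_db)
    assert len(output) == 1
    assert output[0].status == "SUCCESS"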
tests/test_admin_server.py
@@ -1,4 +1,5 @@
  import os
+ import threading
  import time
  import uuid

@@ -8,7 +9,7 @@ import sqlalchemy as sa
  # Public API
  from dbos import DBOS, ConfigFile, Queue, SetWorkflowID, _workflow_commands
  from dbos._schemas.system_database import SystemSchema
- from dbos._sys_db import WorkflowStatusString
+ from dbos._sys_db import SystemDatabase, WorkflowStatusString
  from dbos._utils import GlobalParams


@@ -171,11 +172,13 @@ runtimeConfig:
  os.remove("dbos-config.yaml")


- def test_admin_workflow_resume(dbos: DBOS, config: ConfigFile) -> None:
+ def test_admin_workflow_resume(dbos: DBOS, sys_db: SystemDatabase) -> None:
  counter: int = 0
+ event = threading.Event()

  @DBOS.workflow()
  def simple_workflow() -> None:
+ event.set()
  nonlocal counter
  counter += 1

@@ -185,13 +188,11 @@ def test_admin_workflow_resume(dbos: DBOS, config: ConfigFile) -> None:
  dbos._sys_db.wait_for_buffer_flush()

  # Verify the workflow has succeeded
- output = _workflow_commands.list_workflows(
- config, 10, None, None, None, None, False, None, None
- )
+ output = _workflow_commands.list_workflows(sys_db)
  assert len(output) == 1, f"Expected list length to be 1, but got {len(output)}"
  assert output[0] != None, "Expected output to be not None"
- wfUuid = output[0].workflowUUID
- info = _workflow_commands.get_workflow(config, wfUuid, True)
+ wfUuid = output[0].workflow_id
+ info = _workflow_commands.get_workflow(sys_db, wfUuid, True)
  assert info is not None, "Expected output to be not None"
  assert info.status == "SUCCESS", f"Expected status to be SUCCESS"

@@ -200,7 +201,7 @@ def test_admin_workflow_resume(dbos: DBOS, config: ConfigFile) -> None:
  f"http://localhost:3001/workflows/{wfUuid}/cancel", json=[], timeout=5
  )
  assert response.status_code == 204
- info = _workflow_commands.get_workflow(config, wfUuid, True)
+ info = _workflow_commands.get_workflow(sys_db, wfUuid, True)
  assert info is not None
  assert info.status == "CANCELLED", f"Expected status to be CANCELLED"

@@ -216,13 +217,15 @@ def test_admin_workflow_resume(dbos: DBOS, config: ConfigFile) -> None:
  c.execute(query)

  # Resume the workflow. Verify that it succeeds again.
+ event.clear()
  response = requests.post(
  f"http://localhost:3001/workflows/{wfUuid}/resume", json=[], timeout=5
  )
  assert response.status_code == 204
+ assert event.wait(timeout=5)
  dbos._sys_db.wait_for_buffer_flush()
  assert counter == 2
- info = _workflow_commands.get_workflow(config, wfUuid, True)
+ info = _workflow_commands.get_workflow(sys_db, wfUuid, True)
  assert info is not None
  assert info.status == "SUCCESS", f"Expected status to be SUCCESS"
  assert info.executor_id == GlobalParams.executor_id
@@ -233,13 +236,13 @@ def test_admin_workflow_resume(dbos: DBOS, config: ConfigFile) -> None:
  )
  assert response.status_code == 204
  dbos._sys_db.wait_for_buffer_flush()
- info = _workflow_commands.get_workflow(config, wfUuid, True)
+ info = _workflow_commands.get_workflow(sys_db, wfUuid, True)
  assert info is not None
  assert info.status == "SUCCESS", f"Expected status to be SUCCESS"
  assert counter == 2


- def test_admin_workflow_restart(dbos: DBOS, config: ConfigFile) -> None:
+ def test_admin_workflow_restart(dbos: DBOS, sys_db: SystemDatabase) -> None:

  @DBOS.workflow()
  def simple_workflow() -> None:
@@ -251,16 +254,14 @@ def test_admin_workflow_restart(dbos: DBOS, config: ConfigFile) -> None:
  time.sleep(1)

  # get the workflow list
- output = _workflow_commands.list_workflows(
- config, 10, None, None, None, None, False, None, None
- )
+ output = _workflow_commands.list_workflows(sys_db)
  assert len(output) == 1, f"Expected list length to be 1, but got {len(output)}"

  assert output[0] != None, "Expected output to be not None"

- wfUuid = output[0].workflowUUID
+ wfUuid = output[0].workflow_id

- info = _workflow_commands.get_workflow(config, wfUuid, True)
+ info = _workflow_commands.get_workflow(sys_db, wfUuid, True)
  assert info is not None, "Expected output to be not None"

  assert info.status == "SUCCESS", f"Expected status to be SUCCESS"
@@ -270,7 +271,7 @@ def test_admin_workflow_restart(dbos: DBOS, config: ConfigFile) -> None:
  )
  assert response.status_code == 204

- info = _workflow_commands.get_workflow(config, wfUuid, True)
+ info = _workflow_commands.get_workflow(sys_db, wfUuid, True)
  if info is not None:
  assert info.status == "CANCELLED", f"Expected status to be CANCELLED"
  else:
@@ -283,23 +284,21 @@ def test_admin_workflow_restart(dbos: DBOS, config: ConfigFile) -> None:

  time.sleep(1)

- info = _workflow_commands.get_workflow(config, wfUuid, True)
+ info = _workflow_commands.get_workflow(sys_db, wfUuid, True)
  if info is not None:
  assert info.status == "CANCELLED", f"Expected status to be CANCELLED"
  else:
  assert False, "Expected info to be not None"

- output = _workflow_commands.list_workflows(
- config, 10, None, None, None, None, False, None, None
- )
+ output = _workflow_commands.list_workflows(sys_db)
  assert len(output) == 2, f"Expected list length to be 2, but got {len(output)}"

- if output[0].workflowUUID == wfUuid:
- new_wfUuid = output[1].workflowUUID
+ if output[0].workflow_id == wfUuid:
+ new_wfUuid = output[1].workflow_id
  else:
- new_wfUuid = output[0].workflowUUID
+ new_wfUuid = output[0].workflow_id

- info = _workflow_commands.get_workflow(config, new_wfUuid, True)
+ info = _workflow_commands.get_workflow(sys_db, new_wfUuid, True)
  if info is not None:
  assert info.status == "SUCCESS", f"Expected status to be SUCCESS"
  else: