dbos 1.14.0a9__py3-none-any.whl → 1.15.0a1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.

Files changed (45)
  1. dbos/_client.py +13 -14
  2. dbos/_context.py +12 -6
  3. dbos/_core.py +2 -7
  4. dbos/_dbos.py +5 -13
  5. dbos/_dbos_config.py +17 -29
  6. dbos/_debouncer.py +1 -7
  7. dbos/_debug.py +0 -8
  8. dbos/_docker_pg_helper.py +93 -51
  9. dbos/_fastapi.py +5 -1
  10. dbos/_logger.py +18 -21
  11. dbos/_migration.py +4 -41
  12. dbos/_serialization.py +19 -30
  13. dbos/_sys_db_postgres.py +2 -9
  14. dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos +34 -0
  15. dbos/_tracer.py +42 -31
  16. dbos/cli/_github_init.py +22 -16
  17. dbos/cli/_template_init.py +5 -16
  18. dbos/cli/cli.py +20 -28
  19. {dbos-1.14.0a9.dist-info → dbos-1.15.0a1.dist-info}/METADATA +8 -16
  20. dbos-1.15.0a1.dist-info/RECORD +59 -0
  21. dbos/_alembic_migrations/env.py +0 -62
  22. dbos/_alembic_migrations/script.py.mako +0 -26
  23. dbos/_alembic_migrations/versions/01ce9f07bd10_streaming.py +0 -42
  24. dbos/_alembic_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -34
  25. dbos/_alembic_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -45
  26. dbos/_alembic_migrations/versions/471b60d64126_dbos_migrations.py +0 -35
  27. dbos/_alembic_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -35
  28. dbos/_alembic_migrations/versions/5c361fc04708_added_system_tables.py +0 -193
  29. dbos/_alembic_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -71
  30. dbos/_alembic_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -44
  31. dbos/_alembic_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -35
  32. dbos/_alembic_migrations/versions/a3b18ad34abe_added_triggers.py +0 -72
  33. dbos/_alembic_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -43
  34. dbos/_alembic_migrations/versions/d76646551a6c_workflow_queue.py +0 -28
  35. dbos/_alembic_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -30
  36. dbos/_alembic_migrations/versions/eab0cc1d9a14_job_queue.py +0 -56
  37. dbos/_alembic_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -46
  38. dbos/_templates/dbos-db-starter/alembic.ini +0 -116
  39. dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -85
  40. dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -26
  41. dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -35
  42. dbos-1.14.0a9.dist-info/RECORD +0 -79
  43. {dbos-1.14.0a9.dist-info → dbos-1.15.0a1.dist-info}/WHEEL +0 -0
  44. {dbos-1.14.0a9.dist-info → dbos-1.15.0a1.dist-info}/entry_points.txt +0 -0
  45. {dbos-1.14.0a9.dist-info → dbos-1.15.0a1.dist-info}/licenses/LICENSE +0 -0
dbos/_migration.py CHANGED
@@ -1,16 +1,11 @@
-import logging
-import os
-import re
 import sys
 
 import sqlalchemy as sa
-from alembic import command
-from alembic.config import Config
 
 from ._logger import dbos_logger
 
 
-def ensure_dbos_schema(engine: sa.Engine) -> bool:
+def ensure_dbos_schema(engine: sa.Engine) -> None:
     """
     True if using DBOS migrations (DBOS schema and migrations table already exist or were created)
     False if using Alembic migrations (DBOS schema exists, but dbos_migrations table doesn't)
@@ -22,10 +17,10 @@ def ensure_dbos_schema(engine: sa.Engine) -> bool:
                 "SELECT schema_name FROM information_schema.schemata WHERE schema_name = 'dbos'"
             )
         )
-        schema_existed = schema_result.fetchone() is not None
+        schema_exists = schema_result.fetchone() is not None
 
         # Create schema if it doesn't exist
-        if not schema_existed:
+        if not schema_exists:
             conn.execute(sa.text("CREATE SCHEMA dbos"))
 
         # Check if dbos_migrations table exists
@@ -36,44 +31,12 @@ def ensure_dbos_schema(engine: sa.Engine) -> bool:
         )
         table_exists = table_result.fetchone() is not None
 
-        if table_exists:
-            return True
-        elif schema_existed:
-            return False
-        else:
+        if not table_exists:
             conn.execute(
                 sa.text(
                     "CREATE TABLE dbos.dbos_migrations (version BIGINT NOT NULL PRIMARY KEY)"
                 )
             )
-            return True
-
-
-def run_alembic_migrations(engine: sa.Engine) -> None:
-    """Run system database schema migrations with Alembic.
-    This is DEPRECATED in favor of DBOS-managed migrations.
-    It is retained only for backwards compatibility and
-    will be removed in the next major version."""
-    # Run a schema migration for the system database
-    migration_dir = os.path.join(
-        os.path.dirname(os.path.realpath(__file__)), "_alembic_migrations"
-    )
-    alembic_cfg = Config()
-    alembic_cfg.set_main_option("script_location", migration_dir)
-    logging.getLogger("alembic").setLevel(logging.WARNING)
-    # Alembic requires the % in URL-escaped parameters to itself be escaped to %%.
-    escaped_conn_string = re.sub(
-        r"%(?=[0-9A-Fa-f]{2})",
-        "%%",
-        engine.url.render_as_string(hide_password=False),
-    )
-    alembic_cfg.set_main_option("sqlalchemy.url", escaped_conn_string)
-    try:
-        command.upgrade(alembic_cfg, "head")
-    except Exception as e:
-        dbos_logger.warning(
-            f"Exception during system database construction. This is most likely because the system database was configured using a later version of DBOS: {e}"
-        )
 
 
 def run_dbos_migrations(engine: sa.Engine) -> None:
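With the Alembic path removed, bootstrapping the system database is now a single unconditional sequence. A minimal sketch of the new call pattern (the connection URL is a placeholder), mirroring how `PostgresSystemDatabase` uses these functions in the `_sys_db_postgres.py` diff below:

```python
import sqlalchemy as sa

from dbos._migration import ensure_dbos_schema, run_dbos_migrations

# Placeholder URL; point this at a DBOS system database.
engine = sa.create_engine(
    "postgresql+psycopg://postgres:dbos@localhost:5432/my_app_dbos_sys"
)

ensure_dbos_schema(engine)  # idempotent: creates the dbos schema and dbos_migrations table if missing
run_dbos_migrations(engine)  # then applies the DBOS-managed migrations
```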
dbos/_serialization.py CHANGED
@@ -1,8 +1,8 @@
+import base64
+import pickle
 import types
 from typing import Any, Dict, Optional, Tuple, TypedDict
 
-import jsonpickle  # type: ignore
-
 from ._logger import dbos_logger
 
 
@@ -11,54 +11,43 @@ class WorkflowInputs(TypedDict):
     kwargs: Dict[str, Any]
 
 
-def _validate_item(data: Any) -> None:
-    if isinstance(data, (types.MethodType)):
-        raise TypeError("Serialized data item should not be a class method")
-    if isinstance(data, (types.FunctionType)):
-        if jsonpickle.decode(jsonpickle.encode(data, unpicklable=True)) is None:
-            raise TypeError(
-                "Serialized function should be defined at the top level of a module"
-            )
-
-
 def serialize(data: Any) -> str:
-    """Serialize an object to a JSON string using jsonpickle."""
-    _validate_item(data)
-    encoded_data: str = jsonpickle.encode(data, unpicklable=True)
+    pickled_data: bytes = pickle.dumps(data)
+    encoded_data: str = base64.b64encode(pickled_data).decode("utf-8")
     return encoded_data
 
 
 def serialize_args(data: WorkflowInputs) -> str:
-    """Serialize args to a JSON string using jsonpickle."""
-    arg: Any
-    for arg in data["args"]:
-        _validate_item(arg)
-    for arg in data["kwargs"].values():
-        _validate_item(arg)
-    encoded_data: str = jsonpickle.encode(data, unpicklable=True)
+    """Serialize args to a base64-encoded string using pickle."""
+    pickled_data: bytes = pickle.dumps(data)
+    encoded_data: str = base64.b64encode(pickled_data).decode("utf-8")
     return encoded_data
 
 
 def serialize_exception(data: Exception) -> str:
-    """Serialize an Exception object to a JSON string using jsonpickle."""
-    encoded_data: str = jsonpickle.encode(data, unpicklable=True)
+    """Serialize an Exception object to a base64-encoded string using pickle."""
+    pickled_data: bytes = pickle.dumps(data)
+    encoded_data: str = base64.b64encode(pickled_data).decode("utf-8")
     return encoded_data
 
 
 def deserialize(serialized_data: str) -> Any:
-    """Deserialize a JSON string back to a Python object using jsonpickle."""
-    return jsonpickle.decode(serialized_data)
+    """Deserialize a base64-encoded string back to a Python object using pickle."""
+    pickled_data: bytes = base64.b64decode(serialized_data)
+    return pickle.loads(pickled_data)
 
 
 def deserialize_args(serialized_data: str) -> WorkflowInputs:
-    """Deserialize a JSON string back to a Python object list using jsonpickle."""
-    args: WorkflowInputs = jsonpickle.decode(serialized_data)
+    """Deserialize a base64-encoded string back to a Python object list using pickle."""
+    pickled_data: bytes = base64.b64decode(serialized_data)
+    args: WorkflowInputs = pickle.loads(pickled_data)
     return args
 
 
 def deserialize_exception(serialized_data: str) -> Exception:
-    """Deserialize JSON string back to a Python Exception using jsonpickle."""
-    exc: Exception = jsonpickle.decode(serialized_data)
+    """Deserialize a base64-encoded string back to a Python Exception using pickle."""
+    pickled_data: bytes = base64.b64decode(serialized_data)
+    exc: Exception = pickle.loads(pickled_data)
     return exc
 
 
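The practical effect of this rewrite: serialized values are now base64-encoded pickle bytes rather than jsonpickle JSON, so stored strings are opaque instead of human-readable, and the proactive `_validate_item` checks are gone (pickle itself raises if a value cannot be pickled). A quick round-trip sketch using the functions above:

```python
from dbos._serialization import (
    deserialize,
    deserialize_exception,
    serialize,
    serialize_exception,
)

payload = {"greeting": "hello", "count": 3}
encoded = serialize(payload)  # base64 text, no longer JSON
assert deserialize(encoded) == payload

# Exceptions round-trip the same way.
restored = deserialize_exception(serialize_exception(ValueError("boom")))
assert isinstance(restored, ValueError) and str(restored) == "boom"
```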
dbos/_sys_db_postgres.py CHANGED
@@ -5,11 +5,7 @@ import psycopg
 import sqlalchemy as sa
 from sqlalchemy.exc import DBAPIError
 
-from dbos._migration import (
-    ensure_dbos_schema,
-    run_alembic_migrations,
-    run_dbos_migrations,
-)
+from dbos._migration import ensure_dbos_schema, run_dbos_migrations
 from dbos._schemas.system_database import SystemSchema
 
 from ._logger import dbos_logger
@@ -66,10 +62,7 @@ class PostgresSystemDatabase(SystemDatabase):
             conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
         engine.dispose()
 
-        using_dbos_migrations = ensure_dbos_schema(self.engine)
-        if not using_dbos_migrations:
-            # Complete the Alembic migrations, create the dbos_migrations table
-            run_alembic_migrations(self.engine)
+        ensure_dbos_schema(self.engine)
         run_dbos_migrations(self.engine)
 
     def _cleanup_connections(self) -> None:
dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos ADDED
@@ -0,0 +1,34 @@
+"""
+Create the dbos_hello table using SQLAlchemy.
+"""
+
+import os
+from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String
+
+
+def create_dbos_hello_table() -> None:
+    """
+    Create the dbos_hello table in the database.
+
+    Args:
+        database_url: Database connection string. If not provided,
+            uses DATABASE_URL environment variable.
+    """
+    database_url = os.environ.get("DBOS_DATABASE_URL", "postgresql+psycopg://postgres:dbos@localhost:5432/${default_db_name}?connect_timeout=5")
+
+    engine = create_engine(database_url)
+    metadata = MetaData()
+
+    dbos_hello = Table(
+        'dbos_hello',
+        metadata,
+        Column('greet_count', Integer, primary_key=True, autoincrement=True, nullable=False),
+        Column('name', String, nullable=False)
+    )
+
+    metadata.create_all(engine)
+    engine.dispose()
+
+
+if __name__ == "__main__":
+    create_dbos_hello_table()
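In the generated starter project, this script replaces the Alembic migration setup: the default `database.migrate` command becomes `python3 migrations/create_table.py`, as shown in the `_template_init.py` diff below.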
dbos/_tracer.py CHANGED
@@ -1,13 +1,9 @@
 import os
 from typing import TYPE_CHECKING, Optional
 
-from opentelemetry import trace
-from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
-from opentelemetry.sdk.resources import Resource
-from opentelemetry.sdk.trace import TracerProvider
-from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter
-from opentelemetry.semconv.resource import ResourceAttributes
-from opentelemetry.trace import Span
+if TYPE_CHECKING:
+    from opentelemetry.trace import Span
+    from opentelemetry.sdk.trace import TracerProvider
 
 from dbos._utils import GlobalParams
 
@@ -29,34 +25,47 @@ class DBOSTracer:
     def config(self, config: ConfigFile) -> None:
         self.otlp_attributes = config.get("telemetry", {}).get("otlp_attributes", {})  # type: ignore
         self.disable_otlp = config.get("telemetry", {}).get("disable_otlp", False)  # type: ignore
-        if not self.disable_otlp and not isinstance(
-            trace.get_tracer_provider(), TracerProvider
-        ):
-            resource = Resource(
-                attributes={
-                    ResourceAttributes.SERVICE_NAME: config["name"],
-                }
+        if not self.disable_otlp:
+            from opentelemetry import trace
+            from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
+                OTLPSpanExporter,
             )
-
-            provider = TracerProvider(resource=resource)
-            if os.environ.get("DBOS__CONSOLE_TRACES", None) is not None:
-                processor = BatchSpanProcessor(ConsoleSpanExporter())
-                provider.add_span_processor(processor)
-            otlp_traces_endpoints = (
-                config.get("telemetry", {}).get("OTLPExporter", {}).get("tracesEndpoint")  # type: ignore
+            from opentelemetry.sdk.resources import Resource
+            from opentelemetry.sdk.trace import TracerProvider
+            from opentelemetry.sdk.trace.export import (
+                BatchSpanProcessor,
+                ConsoleSpanExporter,
             )
-            if otlp_traces_endpoints:
-                for e in otlp_traces_endpoints:
-                    processor = BatchSpanProcessor(OTLPSpanExporter(endpoint=e))
+            from opentelemetry.semconv.attributes.service_attributes import SERVICE_NAME
+
+            if not isinstance(trace.get_tracer_provider(), TracerProvider):
+                resource = Resource(
+                    attributes={
+                        SERVICE_NAME: config["name"],
+                    }
+                )
+
+                provider = TracerProvider(resource=resource)
+                if os.environ.get("DBOS__CONSOLE_TRACES", None) is not None:
+                    processor = BatchSpanProcessor(ConsoleSpanExporter())
                     provider.add_span_processor(processor)
-            trace.set_tracer_provider(provider)
-
-    def set_provider(self, provider: Optional[TracerProvider]) -> None:
+                otlp_traces_endpoints = (
+                    config.get("telemetry", {}).get("OTLPExporter", {}).get("tracesEndpoint")  # type: ignore
+                )
+                if otlp_traces_endpoints:
+                    for e in otlp_traces_endpoints:
+                        processor = BatchSpanProcessor(OTLPSpanExporter(endpoint=e))
+                        provider.add_span_processor(processor)
+                trace.set_tracer_provider(provider)
+
+    def set_provider(self, provider: "Optional[TracerProvider]") -> None:
         self.provider = provider
 
     def start_span(
-        self, attributes: "TracedAttributes", parent: Optional[Span] = None
-    ) -> Span:
+        self, attributes: "TracedAttributes", parent: "Optional[Span]" = None
+    ) -> "Span":
+        from opentelemetry import trace
+
         tracer = (
             self.provider.get_tracer("dbos-tracer")
             if self.provider is not None
@@ -74,11 +83,13 @@ class DBOSTracer:
             span.set_attribute(k, v)
         return span
 
-    def end_span(self, span: Span) -> None:
+    def end_span(self, span: "Span") -> None:
         span.end()
 
-    def get_current_span(self) -> Optional[Span]:
+    def get_current_span(self) -> "Optional[Span]":
         # Return the current active span if any. It might not be a DBOS span.
+        from opentelemetry import trace
+
         span = trace.get_current_span()
         if span.get_span_context().is_valid:
             return span
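Because the OpenTelemetry imports now happen lazily inside `config()` and `start_span()`, the OTel packages are only required when OTLP export is actually enabled (they move behind the `otel` extra; see the METADATA diff below). A minimal sketch of the same deferred-import pattern, with illustrative names that are not part of the dbos API:

```python
from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
    # Type-only import: no runtime dependency on the OTel SDK.
    from opentelemetry.sdk.trace import TracerProvider


def make_provider(service_name: str) -> "Optional[TracerProvider]":
    # Import inside the function so the SDK is needed only when tracing is on.
    try:
        from opentelemetry.sdk.resources import Resource
        from opentelemetry.sdk.trace import TracerProvider
    except ImportError:
        return None  # OTel not installed; run untraced
    return TracerProvider(
        resource=Resource(attributes={"service.name": service_name})
    )
```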
dbos/cli/_github_init.py CHANGED
@@ -1,8 +1,9 @@
+import json
 import os
 from base64 import b64decode
-from typing import List, TypedDict
-
-import requests
+from typing import Any, List, TypedDict
+from urllib.error import HTTPError
+from urllib.request import Request, urlopen
 
 DEMO_REPO_API = "https://api.github.com/repos/dbos-inc/dbos-demo-apps"
 PY_DEMO_PATH = "python/"
@@ -34,43 +35,48 @@ class GitHubItem(TypedDict):
     size: int
 
 
-def _fetch_github(url: str) -> requests.Response:
+def _fetch_github(url: str) -> Any:
     headers = {}
     github_token = os.getenv("GITHUB_TOKEN")
     if github_token:
         headers["Authorization"] = f"Bearer {github_token}"
 
-    response = requests.get(url, headers=headers)
+    request = Request(url, headers=headers)
+
+    try:
+        with urlopen(request) as response:
+            data = response.read()
+            return json.loads(data.decode("utf-8"))
+    except HTTPError as e:
+        # Read response headers
+        rate_limit_remaining = e.headers.get("x-ratelimit-remaining")
 
-    if not response.ok:
-        if response.headers.get("x-ratelimit-remaining") == "0":
+        if rate_limit_remaining == "0":
             raise Exception(
                 "Error fetching from GitHub API: rate limit exceeded.\n"
                 "Please wait a few minutes and try again.\n"
                 "To increase the limit, you can create a personal access token and set it in the GITHUB_TOKEN environment variable.\n"
                 "Details: https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api"
             )
-        elif response.status_code == 401:
+        elif e.code == 401:
             raise Exception(
-                f"Error fetching content from GitHub {url}: {response.status_code} {response.reason}.\n"
+                f"Error fetching content from GitHub {url}: {e.code} {e.reason}.\n"
                 "Please ensure your GITHUB_TOKEN environment variable is set to a valid personal access token."
             )
         raise Exception(
-            f"Error fetching content from GitHub {url}: {response.status_code} {response.reason}"
+            f"Error fetching content from GitHub {url}: {e.code} {e.reason}"
         )
 
-    return response
-
 
 def _fetch_github_tree(tag: str) -> List[GitHubTreeItem]:
-    response = _fetch_github(f"{DEMO_REPO_API}/git/trees/{tag}?recursive=1")
-    tree_data: GitHubTree = response.json()
+    tree_data: GitHubTree = _fetch_github(
+        f"{DEMO_REPO_API}/git/trees/{tag}?recursive=1"
+    )
     return tree_data["tree"]
 
 
 def _fetch_github_item(url: str) -> str:
-    response = _fetch_github(url)
-    item: GitHubItem = response.json()
+    item: GitHubItem = _fetch_github(url)
     return b64decode(item["content"]).decode("utf-8")
 
 
dbos/cli/_template_init.py CHANGED
@@ -2,10 +2,6 @@ import os
 import shutil
 import typing
 from os import path
-from typing import Any
-
-import tomlkit
-from rich import print
 
 from dbos._dbos_config import _app_name_to_db_name
 
@@ -46,7 +42,7 @@ def _copy_template_dir(src_dir: str, dst_dir: str, ctx: dict[str, str]) -> None:
 
         dst = path.join(dst_root, base if ext == ".dbos" else file)
         if path.exists(dst):
-            print(f"[yellow]File {dst} already exists, skipping[/yellow]")
+            print(f"File {dst} already exists, skipping")
             continue
 
         if ext == ".dbos":
@@ -62,7 +58,7 @@ def copy_template(src_dir: str, project_name: str, config_mode: bool) -> None:
     package_name = project_name.replace("-", "_")
     default_migration_section = """database:
   migrate:
-    - alembic upgrade head
+    - python3 migrations/create_table.py
 """
     ctx = {
         "project_name": project_name,
@@ -89,18 +85,11 @@ def copy_template(src_dir: str, project_name: str, config_mode: bool) -> None:
 
 def get_project_name() -> typing.Union[str, None]:
     name = None
+
     try:
-        with open("pyproject.toml", "rb") as file:
-            pyproj = typing.cast(dict[str, Any], tomlkit.load(file))
-            name = typing.cast(str, pyproj["project"]["name"])
+        _, parent = path.split(path.abspath("."))
+        name = parent
     except:
         pass
 
-    if name == None:
-        try:
-            _, parent = path.split(path.abspath("."))
-            name = parent
-        except:
-            pass
-
     return name
dbos/cli/cli.py CHANGED
@@ -1,3 +1,4 @@
+import json
 import os
 import platform
 import signal
@@ -5,14 +6,10 @@ import subprocess
 import time
 import typing
 from os import path
-from typing import Any, Optional, Tuple
+from typing import Annotated, Any, List, Optional, Tuple
 
-import jsonpickle  # type: ignore
 import sqlalchemy as sa
 import typer
-from rich import print as richprint
-from rich.prompt import IntPrompt
-from typing_extensions import Annotated, List
 
 from dbos._context import SetWorkflowID
 from dbos._debug import debug_workflow, parse_start_command
@@ -34,6 +31,11 @@ from ..cli._github_init import create_template_from_github
 from ._template_init import copy_template, get_project_name, get_templates_directory
 
 
+class DefaultEncoder(json.JSONEncoder):
+    def default(self, obj: Any) -> str:
+        return str(obj)
+
+
 def _get_db_url(
     *, system_database_url: Optional[str], application_database_url: Optional[str]
 ) -> Tuple[str, str]:
@@ -201,7 +203,7 @@ def init(
             path.join(templates_dir, template), project_name, config_mode=config
         )
     except Exception as e:
-        richprint(f"[red]{e}[/red]")
+        print(e)
 
 
 def _resolve_project_name_and_template(
@@ -222,27 +224,21 @@ def _resolve_project_name_and_template(
         if template not in templates:
             raise Exception(f"Template {template} not found in {templates_dir}")
     else:
-        richprint("\n[bold]Available templates:[/bold]")
+        print("\nAvailable templates:")
         for idx, template_name in enumerate(templates, 1):
-            richprint(f"  {idx}. {template_name}")
+            print(f"  {idx}. {template_name}")
         while True:
             try:
-                choice = IntPrompt.ask(
-                    "\nSelect template number",
-                    show_choices=False,
-                    show_default=False,
-                )
+                choice = int(input("\nSelect template number: "))
                 if 1 <= choice <= len(templates):
                     template = templates[choice - 1]
                     break
                 else:
-                    richprint(
-                        "[red]Invalid selection. Please choose a number from the list.[/red]"
-                    )
+                    print("Invalid selection. Please choose a number from the list.")
             except (KeyboardInterrupt, EOFError):
                 raise typer.Abort()
             except ValueError:
-                richprint("[red]Please enter a valid number.[/red]")
+                print("Please enter a valid number.")
 
     if template in git_templates:
         if project_name is None:
@@ -499,7 +495,7 @@ def list(
         app_version=appversion,
         name=name,
     )
-    print(jsonpickle.encode(workflows, unpicklable=False))
+    print(json.dumps([w.__dict__ for w in workflows], cls=DefaultEncoder))
 
 
 @workflow.command(help="Retrieve the status of a workflow")
@@ -531,7 +527,7 @@ def get(
         system_database_url=system_database_url,
     )
     status = client.retrieve_workflow(workflow_id=workflow_id).get_status()
-    print(jsonpickle.encode(status, unpicklable=False))
+    print(json.dumps(status.__dict__, cls=DefaultEncoder))
 
 
 @workflow.command(help="List the steps of a workflow")
@@ -562,12 +558,8 @@ def steps(
         application_database_url=application_database_url,
         system_database_url=system_database_url,
    )
-    print(
-        jsonpickle.encode(
-            client.list_workflow_steps(workflow_id=workflow_id),
-            unpicklable=False,
-        )
-    )
+    steps = client.list_workflow_steps(workflow_id=workflow_id)
+    print(json.dumps(steps, cls=DefaultEncoder))
 
 
 @workflow.command(
@@ -665,7 +657,7 @@ def restart(
         system_database_url=system_database_url,
     )
     status = client.fork_workflow(workflow_id=workflow_id, start_step=1).get_status()
-    print(jsonpickle.encode(status, unpicklable=False))
+    print(json.dumps(status.__dict__, cls=DefaultEncoder))
 
 
 @workflow.command(
@@ -736,7 +728,7 @@ def fork(
         start_step=step,
         application_version=application_version,
     ).get_status()
-    print(jsonpickle.encode(status, unpicklable=False))
+    print(json.dumps(status.__dict__, cls=DefaultEncoder))
 
 
 @queue.command(name="list", help="List enqueued functions for your application")
@@ -836,7 +828,7 @@ def list_queue(
         status=status,
         name=name,
     )
-    print(jsonpickle.encode(workflows, unpicklable=False))
+    print(json.dumps([w.__dict__ for w in workflows], cls=DefaultEncoder))
 
 
 if __name__ == "__main__":
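`DefaultEncoder` is what lets the stdlib `json` module replace jsonpickle for CLI output: anything not natively JSON-serializable (datetimes, enums, UUIDs) falls back to its `str()` form. A small sketch of that behavior (the payload is illustrative, not an actual workflow status):

```python
import json
from datetime import datetime, timezone
from enum import Enum


class DefaultEncoder(json.JSONEncoder):
    def default(self, obj):
        # Fall back to the string form for non-JSON-native types.
        return str(obj)


class Status(Enum):
    SUCCESS = "SUCCESS"


row = {
    "workflow_id": "wf-123",
    "status": Status.SUCCESS,
    "created_at": datetime.now(timezone.utc),
}
print(json.dumps(row, cls=DefaultEncoder))
# e.g. {"workflow_id": "wf-123", "status": "Status.SUCCESS", "created_at": "2025-06-01 00:00:00+00:00"}
```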
{dbos-1.14.0a9.dist-info → dbos-1.15.0a1.dist-info}/METADATA CHANGED
@@ -1,28 +1,20 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 1.14.0a9
+Version: 1.15.0a1
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
-Requires-Python: >=3.9
+Requires-Python: >=3.10
 Requires-Dist: pyyaml>=6.0.2
-Requires-Dist: jsonschema>=4.23.0
-Requires-Dist: alembic>=1.13.3
-Requires-Dist: typing-extensions>=4.12.2; python_version < "3.10"
-Requires-Dist: typer>=0.12.5
-Requires-Dist: jsonpickle>=3.3.0
-Requires-Dist: opentelemetry-api>=1.27.0
-Requires-Dist: opentelemetry-sdk>=1.27.0
-Requires-Dist: opentelemetry-exporter-otlp-proto-http>=1.27.0
 Requires-Dist: python-dateutil>=2.9.0.post0
-Requires-Dist: fastapi[standard]>=0.115.2
-Requires-Dist: tomlkit>=0.13.2
 Requires-Dist: psycopg[binary]>=3.1
-Requires-Dist: docker>=7.1.0
-Requires-Dist: cryptography>=43.0.3
-Requires-Dist: rich>=13.9.4
-Requires-Dist: pyjwt>=2.10.1
 Requires-Dist: websockets>=14.0
+Requires-Dist: typer-slim>=0.17.4
+Requires-Dist: sqlalchemy>=2.0.43
+Provides-Extra: otel
+Requires-Dist: opentelemetry-api>=1.37.0; extra == "otel"
+Requires-Dist: opentelemetry-sdk>=1.37.0; extra == "otel"
+Requires-Dist: opentelemetry-exporter-otlp-proto-http>=1.37.0; extra == "otel"
 Description-Content-Type: text/markdown
 
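The dependency cuts are the headline change: jsonschema, alembic, jsonpickle, fastapi, tomlkit, docker, cryptography, rich, and pyjwt are dropped; typer is swapped for typer-slim; sqlalchemy becomes an explicit requirement; and OpenTelemetry moves behind the new `otel` extra, so tracing users would install it explicitly (standard extras syntax, e.g. `pip install "dbos[otel]"`). Raising `Requires-Python` to >=3.10 also removes the conditional typing-extensions dependency, matching the switch to importing `Annotated` and `List` from `typing` in `cli.py`.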