dbos 1.14.0a9__py3-none-any.whl → 1.15.0a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbos might be problematic. See the package registry's advisory page for more details.

Files changed (47)
  1. dbos/_client.py +30 -35
  2. dbos/_context.py +12 -6
  3. dbos/_core.py +5 -8
  4. dbos/_dbos.py +15 -27
  5. dbos/_dbos_config.py +32 -42
  6. dbos/_debouncer.py +1 -7
  7. dbos/_debug.py +0 -8
  8. dbos/_docker_pg_helper.py +93 -51
  9. dbos/_fastapi.py +5 -1
  10. dbos/_logger.py +18 -21
  11. dbos/_migration.py +4 -41
  12. dbos/_serialization.py +19 -30
  13. dbos/_sys_db_postgres.py +2 -9
  14. dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos +34 -0
  15. dbos/_tracer.py +42 -31
  16. dbos/_workflow_commands.py +9 -5
  17. dbos/cli/_github_init.py +22 -16
  18. dbos/cli/_template_init.py +5 -16
  19. dbos/cli/cli.py +27 -33
  20. dbos/cli/migration.py +15 -10
  21. {dbos-1.14.0a9.dist-info → dbos-1.15.0a2.dist-info}/METADATA +8 -16
  22. dbos-1.15.0a2.dist-info/RECORD +59 -0
  23. dbos/_alembic_migrations/env.py +0 -62
  24. dbos/_alembic_migrations/script.py.mako +0 -26
  25. dbos/_alembic_migrations/versions/01ce9f07bd10_streaming.py +0 -42
  26. dbos/_alembic_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -34
  27. dbos/_alembic_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -45
  28. dbos/_alembic_migrations/versions/471b60d64126_dbos_migrations.py +0 -35
  29. dbos/_alembic_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -35
  30. dbos/_alembic_migrations/versions/5c361fc04708_added_system_tables.py +0 -193
  31. dbos/_alembic_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -71
  32. dbos/_alembic_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -44
  33. dbos/_alembic_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -35
  34. dbos/_alembic_migrations/versions/a3b18ad34abe_added_triggers.py +0 -72
  35. dbos/_alembic_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -43
  36. dbos/_alembic_migrations/versions/d76646551a6c_workflow_queue.py +0 -28
  37. dbos/_alembic_migrations/versions/d994145b47b6_consolidate_inputs.py +0 -30
  38. dbos/_alembic_migrations/versions/eab0cc1d9a14_job_queue.py +0 -56
  39. dbos/_alembic_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -46
  40. dbos/_templates/dbos-db-starter/alembic.ini +0 -116
  41. dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -85
  42. dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -26
  43. dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -35
  44. dbos-1.14.0a9.dist-info/RECORD +0 -79
  45. {dbos-1.14.0a9.dist-info → dbos-1.15.0a2.dist-info}/WHEEL +0 -0
  46. {dbos-1.14.0a9.dist-info → dbos-1.15.0a2.dist-info}/entry_points.txt +0 -0
  47. {dbos-1.14.0a9.dist-info → dbos-1.15.0a2.dist-info}/licenses/LICENSE +0 -0
dbos/cli/_github_init.py CHANGED
@@ -1,8 +1,9 @@
1
+ import json
1
2
  import os
2
3
  from base64 import b64decode
3
- from typing import List, TypedDict
4
-
5
- import requests
4
+ from typing import Any, List, TypedDict
5
+ from urllib.error import HTTPError
6
+ from urllib.request import Request, urlopen
6
7
 
7
8
  DEMO_REPO_API = "https://api.github.com/repos/dbos-inc/dbos-demo-apps"
8
9
  PY_DEMO_PATH = "python/"
@@ -34,43 +35,48 @@ class GitHubItem(TypedDict):
34
35
  size: int
35
36
 
36
37
 
37
- def _fetch_github(url: str) -> requests.Response:
38
+ def _fetch_github(url: str) -> Any:
38
39
  headers = {}
39
40
  github_token = os.getenv("GITHUB_TOKEN")
40
41
  if github_token:
41
42
  headers["Authorization"] = f"Bearer {github_token}"
42
43
 
43
- response = requests.get(url, headers=headers)
44
+ request = Request(url, headers=headers)
45
+
46
+ try:
47
+ with urlopen(request) as response:
48
+ data = response.read()
49
+ return json.loads(data.decode("utf-8"))
50
+ except HTTPError as e:
51
+ # Read response headers
52
+ rate_limit_remaining = e.headers.get("x-ratelimit-remaining")
44
53
 
45
- if not response.ok:
46
- if response.headers.get("x-ratelimit-remaining") == "0":
54
+ if rate_limit_remaining == "0":
47
55
  raise Exception(
48
56
  "Error fetching from GitHub API: rate limit exceeded.\n"
49
57
  "Please wait a few minutes and try again.\n"
50
58
  "To increase the limit, you can create a personal access token and set it in the GITHUB_TOKEN environment variable.\n"
51
59
  "Details: https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api"
52
60
  )
53
- elif response.status_code == 401:
61
+ elif e.code == 401:
54
62
  raise Exception(
55
- f"Error fetching content from GitHub {url}: {response.status_code} {response.reason}.\n"
63
+ f"Error fetching content from GitHub {url}: {e.code} {e.reason}.\n"
56
64
  "Please ensure your GITHUB_TOKEN environment variable is set to a valid personal access token."
57
65
  )
58
66
  raise Exception(
59
- f"Error fetching content from GitHub {url}: {response.status_code} {response.reason}"
67
+ f"Error fetching content from GitHub {url}: {e.code} {e.reason}"
60
68
  )
61
69
 
62
- return response
63
-
64
70
 
65
71
  def _fetch_github_tree(tag: str) -> List[GitHubTreeItem]:
66
- response = _fetch_github(f"{DEMO_REPO_API}/git/trees/{tag}?recursive=1")
67
- tree_data: GitHubTree = response.json()
72
+ tree_data: GitHubTree = _fetch_github(
73
+ f"{DEMO_REPO_API}/git/trees/{tag}?recursive=1"
74
+ )
68
75
  return tree_data["tree"]
69
76
 
70
77
 
71
78
  def _fetch_github_item(url: str) -> str:
72
- response = _fetch_github(url)
73
- item: GitHubItem = response.json()
79
+ item: GitHubItem = _fetch_github(url)
74
80
  return b64decode(item["content"]).decode("utf-8")
75
81
 
76
82
 
@@ -2,10 +2,6 @@ import os
2
2
  import shutil
3
3
  import typing
4
4
  from os import path
5
- from typing import Any
6
-
7
- import tomlkit
8
- from rich import print
9
5
 
10
6
  from dbos._dbos_config import _app_name_to_db_name
11
7
 
@@ -46,7 +42,7 @@ def _copy_template_dir(src_dir: str, dst_dir: str, ctx: dict[str, str]) -> None:
46
42
 
47
43
  dst = path.join(dst_root, base if ext == ".dbos" else file)
48
44
  if path.exists(dst):
49
- print(f"[yellow]File {dst} already exists, skipping[/yellow]")
45
+ print(f"File {dst} already exists, skipping")
50
46
  continue
51
47
 
52
48
  if ext == ".dbos":
@@ -62,7 +58,7 @@ def copy_template(src_dir: str, project_name: str, config_mode: bool) -> None:
62
58
  package_name = project_name.replace("-", "_")
63
59
  default_migration_section = """database:
64
60
  migrate:
65
- - alembic upgrade head
61
+ - python3 migrations/create_table.py
66
62
  """
67
63
  ctx = {
68
64
  "project_name": project_name,
@@ -89,18 +85,11 @@ def copy_template(src_dir: str, project_name: str, config_mode: bool) -> None:
89
85
 
90
86
  def get_project_name() -> typing.Union[str, None]:
91
87
  name = None
88
+
92
89
  try:
93
- with open("pyproject.toml", "rb") as file:
94
- pyproj = typing.cast(dict[str, Any], tomlkit.load(file))
95
- name = typing.cast(str, pyproj["project"]["name"])
90
+ _, parent = path.split(path.abspath("."))
91
+ name = parent
96
92
  except:
97
93
  pass
98
94
 
99
- if name == None:
100
- try:
101
- _, parent = path.split(path.abspath("."))
102
- name = parent
103
- except:
104
- pass
105
-
106
95
  return name
dbos/cli/cli.py CHANGED
@@ -1,3 +1,4 @@
1
+ import json
1
2
  import os
2
3
  import platform
3
4
  import signal
@@ -5,14 +6,10 @@ import subprocess
5
6
  import time
6
7
  import typing
7
8
  from os import path
8
- from typing import Any, Optional, Tuple
9
+ from typing import Annotated, Any, List, Optional, Tuple
9
10
 
10
- import jsonpickle # type: ignore
11
11
  import sqlalchemy as sa
12
12
  import typer
13
- from rich import print as richprint
14
- from rich.prompt import IntPrompt
15
- from typing_extensions import Annotated, List
16
13
 
17
14
  from dbos._context import SetWorkflowID
18
15
  from dbos._debug import debug_workflow, parse_start_command
@@ -34,9 +31,14 @@ from ..cli._github_init import create_template_from_github
34
31
  from ._template_init import copy_template, get_project_name, get_templates_directory
35
32
 
36
33
 
34
+ class DefaultEncoder(json.JSONEncoder):
35
+ def default(self, obj: Any) -> str:
36
+ return str(obj)
37
+
38
+
37
39
  def _get_db_url(
38
40
  *, system_database_url: Optional[str], application_database_url: Optional[str]
39
- ) -> Tuple[str, str]:
41
+ ) -> Tuple[str, str | None]:
40
42
  """
41
43
  Get the database URL to use for the DBOS application.
42
44
  Order of precedence:
@@ -201,7 +203,7 @@ def init(
201
203
  path.join(templates_dir, template), project_name, config_mode=config
202
204
  )
203
205
  except Exception as e:
204
- richprint(f"[red]{e}[/red]")
206
+ print(e)
205
207
 
206
208
 
207
209
  def _resolve_project_name_and_template(
@@ -222,27 +224,21 @@ def _resolve_project_name_and_template(
222
224
  if template not in templates:
223
225
  raise Exception(f"Template {template} not found in {templates_dir}")
224
226
  else:
225
- richprint("\n[bold]Available templates:[/bold]")
227
+ print("\nAvailable templates:")
226
228
  for idx, template_name in enumerate(templates, 1):
227
- richprint(f" {idx}. {template_name}")
229
+ print(f" {idx}. {template_name}")
228
230
  while True:
229
231
  try:
230
- choice = IntPrompt.ask(
231
- "\nSelect template number",
232
- show_choices=False,
233
- show_default=False,
234
- )
232
+ choice = int(input("\nSelect template number: "))
235
233
  if 1 <= choice <= len(templates):
236
234
  template = templates[choice - 1]
237
235
  break
238
236
  else:
239
- richprint(
240
- "[red]Invalid selection. Please choose a number from the list.[/red]"
241
- )
237
+ print("Invalid selection. Please choose a number from the list.")
242
238
  except (KeyboardInterrupt, EOFError):
243
239
  raise typer.Abort()
244
240
  except ValueError:
245
- richprint("[red]Please enter a valid number.[/red]")
241
+ print("Please enter a valid number.")
246
242
 
247
243
  if template in git_templates:
248
244
  if project_name is None:
@@ -298,7 +294,8 @@ def migrate(
298
294
  )
299
295
 
300
296
  typer.echo(f"Starting DBOS migrations")
301
- typer.echo(f"Application database: {sa.make_url(application_database_url)}")
297
+ if application_database_url:
298
+ typer.echo(f"Application database: {sa.make_url(application_database_url)}")
302
299
  typer.echo(f"System database: {sa.make_url(system_database_url)}")
303
300
 
304
301
  # First, run DBOS migrations on the system database and the application database
@@ -309,9 +306,10 @@ def migrate(
309
306
 
310
307
  # Next, assign permissions on the DBOS schema to the application role, if any
311
308
  if application_role:
312
- grant_dbos_schema_permissions(
313
- database_url=application_database_url, role_name=application_role
314
- )
309
+ if application_database_url:
310
+ grant_dbos_schema_permissions(
311
+ database_url=application_database_url, role_name=application_role
312
+ )
315
313
  grant_dbos_schema_permissions(
316
314
  database_url=system_database_url, role_name=application_role
317
315
  )
@@ -499,7 +497,7 @@ def list(
499
497
  app_version=appversion,
500
498
  name=name,
501
499
  )
502
- print(jsonpickle.encode(workflows, unpicklable=False))
500
+ print(json.dumps([w.__dict__ for w in workflows], cls=DefaultEncoder))
503
501
 
504
502
 
505
503
  @workflow.command(help="Retrieve the status of a workflow")
@@ -531,7 +529,7 @@ def get(
531
529
  system_database_url=system_database_url,
532
530
  )
533
531
  status = client.retrieve_workflow(workflow_id=workflow_id).get_status()
534
- print(jsonpickle.encode(status, unpicklable=False))
532
+ print(json.dumps(status.__dict__, cls=DefaultEncoder))
535
533
 
536
534
 
537
535
  @workflow.command(help="List the steps of a workflow")
@@ -562,12 +560,8 @@ def steps(
562
560
  application_database_url=application_database_url,
563
561
  system_database_url=system_database_url,
564
562
  )
565
- print(
566
- jsonpickle.encode(
567
- client.list_workflow_steps(workflow_id=workflow_id),
568
- unpicklable=False,
569
- )
570
- )
563
+ steps = client.list_workflow_steps(workflow_id=workflow_id)
564
+ print(json.dumps(steps, cls=DefaultEncoder))
571
565
 
572
566
 
573
567
  @workflow.command(
@@ -665,7 +659,7 @@ def restart(
665
659
  system_database_url=system_database_url,
666
660
  )
667
661
  status = client.fork_workflow(workflow_id=workflow_id, start_step=1).get_status()
668
- print(jsonpickle.encode(status, unpicklable=False))
662
+ print(json.dumps(status.__dict__, cls=DefaultEncoder))
669
663
 
670
664
 
671
665
  @workflow.command(
@@ -736,7 +730,7 @@ def fork(
736
730
  start_step=step,
737
731
  application_version=application_version,
738
732
  ).get_status()
739
- print(jsonpickle.encode(status, unpicklable=False))
733
+ print(json.dumps(status.__dict__, cls=DefaultEncoder))
740
734
 
741
735
 
742
736
  @queue.command(name="list", help="List enqueued functions for your application")
@@ -836,7 +830,7 @@ def list_queue(
836
830
  status=status,
837
831
  name=name,
838
832
  )
839
- print(jsonpickle.encode(workflows, unpicklable=False))
833
+ print(json.dumps([w.__dict__ for w in workflows], cls=DefaultEncoder))
840
834
 
841
835
 
842
836
  if __name__ == "__main__":
dbos/cli/migration.py CHANGED
@@ -1,3 +1,5 @@
1
+ from typing import Optional
2
+
1
3
  import sqlalchemy as sa
2
4
  import typer
3
5
 
@@ -5,7 +7,9 @@ from dbos._app_db import ApplicationDatabase
5
7
  from dbos._sys_db import SystemDatabase
6
8
 
7
9
 
8
- def migrate_dbos_databases(app_database_url: str, system_database_url: str) -> None:
10
+ def migrate_dbos_databases(
11
+ app_database_url: Optional[str], system_database_url: str
12
+ ) -> None:
9
13
  app_db = None
10
14
  sys_db = None
11
15
  try:
@@ -17,16 +21,17 @@ def migrate_dbos_databases(app_database_url: str, system_database_url: str) -> N
17
21
  "pool_size": 2,
18
22
  },
19
23
  )
20
- app_db = ApplicationDatabase.create(
21
- database_url=app_database_url,
22
- engine_kwargs={
23
- "pool_timeout": 30,
24
- "max_overflow": 0,
25
- "pool_size": 2,
26
- },
27
- )
28
24
  sys_db.run_migrations()
29
- app_db.run_migrations()
25
+ if app_database_url:
26
+ app_db = ApplicationDatabase.create(
27
+ database_url=app_database_url,
28
+ engine_kwargs={
29
+ "pool_timeout": 30,
30
+ "max_overflow": 0,
31
+ "pool_size": 2,
32
+ },
33
+ )
34
+ app_db.run_migrations()
30
35
  except Exception as e:
31
36
  typer.echo(f"DBOS migrations failed: {e}")
32
37
  raise typer.Exit(code=1)
@@ -1,28 +1,20 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dbos
3
- Version: 1.14.0a9
3
+ Version: 1.15.0a2
4
4
  Summary: Ultra-lightweight durable execution in Python
5
5
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
6
6
  License: MIT
7
- Requires-Python: >=3.9
7
+ Requires-Python: >=3.10
8
8
  Requires-Dist: pyyaml>=6.0.2
9
- Requires-Dist: jsonschema>=4.23.0
10
- Requires-Dist: alembic>=1.13.3
11
- Requires-Dist: typing-extensions>=4.12.2; python_version < "3.10"
12
- Requires-Dist: typer>=0.12.5
13
- Requires-Dist: jsonpickle>=3.3.0
14
- Requires-Dist: opentelemetry-api>=1.27.0
15
- Requires-Dist: opentelemetry-sdk>=1.27.0
16
- Requires-Dist: opentelemetry-exporter-otlp-proto-http>=1.27.0
17
9
  Requires-Dist: python-dateutil>=2.9.0.post0
18
- Requires-Dist: fastapi[standard]>=0.115.2
19
- Requires-Dist: tomlkit>=0.13.2
20
10
  Requires-Dist: psycopg[binary]>=3.1
21
- Requires-Dist: docker>=7.1.0
22
- Requires-Dist: cryptography>=43.0.3
23
- Requires-Dist: rich>=13.9.4
24
- Requires-Dist: pyjwt>=2.10.1
25
11
  Requires-Dist: websockets>=14.0
12
+ Requires-Dist: typer-slim>=0.17.4
13
+ Requires-Dist: sqlalchemy>=2.0.43
14
+ Provides-Extra: otel
15
+ Requires-Dist: opentelemetry-api>=1.37.0; extra == "otel"
16
+ Requires-Dist: opentelemetry-sdk>=1.37.0; extra == "otel"
17
+ Requires-Dist: opentelemetry-exporter-otlp-proto-http>=1.37.0; extra == "otel"
26
18
  Description-Content-Type: text/markdown
27
19
 
28
20
 
@@ -0,0 +1,59 @@
1
+ dbos-1.15.0a2.dist-info/METADATA,sha256=STEFFiAPM8uZbNbNRPz81v65hcKlFKT1hiL52v7yjo8,13021
2
+ dbos-1.15.0a2.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
3
+ dbos-1.15.0a2.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
4
+ dbos-1.15.0a2.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
5
+ dbos/__init__.py,sha256=pT4BuNLDCrIQX27vQG8NlfxX6PZRU7r9miq4thJTszU,982
6
+ dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
7
+ dbos/_admin_server.py,sha256=e8ELhcDWqR3_PNobnNgUvLGh5lzZq0yFSF6dvtzoQRI,16267
8
+ dbos/_app_db.py,sha256=GsV-uYU0QsChWwQDxnrh8_iiZ_zMQB-bsP2jPGIe2aM,16094
9
+ dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
10
+ dbos/_client.py,sha256=zfFQhj_mf4NS85Nlf1xbXwMQVX_mu3UaU7sb8uE5peM,18794
11
+ dbos/_conductor/conductor.py,sha256=3E_hL3c9g9yWqKZkvI6KA0-ZzPMPRo06TOzT1esMiek,24114
12
+ dbos/_conductor/protocol.py,sha256=q3rgLxINFtWFigdOONc-4gX4vn66UmMlJQD6Kj8LnL4,7420
13
+ dbos/_context.py,sha256=cJDxVbswTLXKE5MV4Hmg6gpIX3Dd5mBTG-4lmofWP9E,27668
14
+ dbos/_core.py,sha256=13DNN_fpSIs42NquV80XsHV7yKwY_adKP03h_xhXok4,50493
15
+ dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
16
+ dbos/_dbos.py,sha256=ZrNrFUoND2t8UorqPLjeDvfOP73AasOXqpeuSS6Rz7E,57836
17
+ dbos/_dbos_config.py,sha256=sWXd9RuWGmhkd7j2SraxDWQir_-F2p0SIqGO61ILeyk,25391
18
+ dbos/_debouncer.py,sha256=VmGq1_ZIQ79fnH14LEhdoqxKWp6rlEwzsUwumwAMgTQ,15095
19
+ dbos/_debug.py,sha256=0MfgNqutCUhI4PEmmra9x7f3DiFE_0nscfUCHdLimEY,1415
20
+ dbos/_docker_pg_helper.py,sha256=xySum4hTA8TVMBODoG19u4cXQAB1vCock-jwM2pnmSI,7791
21
+ dbos/_error.py,sha256=GwO0Ng4d4iB52brY09-Ss6Cz_V28Xc0D0cRCzZ6XmNM,8688
22
+ dbos/_event_loop.py,sha256=cvaFN9-II3MsHEOq8QoICc_8qSKrjikMlLfuhC3Y8Dk,2923
23
+ dbos/_fastapi.py,sha256=toYYfbe2aui2aHw0021PoXi2dKlI6NzO3M3pHB0dHOk,3421
24
+ dbos/_flask.py,sha256=Npnakt-a3W5OykONFRkDRnumaDhTQmA0NPdUCGRYKXE,1652
25
+ dbos/_kafka.py,sha256=Gm4fHWl7gYb-i5BMvwNwm5Km3z8zQpseqdMgqgFjlGI,4252
26
+ dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
27
+ dbos/_logger.py,sha256=djnCp147QoQ1iG9Bt3Uz8RyGaXGmi6gebccXsrA6Cps,4660
28
+ dbos/_migration.py,sha256=LgxWPtXqRRwjvS5CrSvQ81B_UzLvRNWd4fnQ_Wo-gek,9507
29
+ dbos/_outcome.py,sha256=7HvosMfEHTh1U5P6xok7kFTGLwa2lPaul0YApb3UnN4,8191
30
+ dbos/_queue.py,sha256=0kJTPwXy3nZ4Epzt-lHky9M9S4L31645drPGFR8fIJY,4854
31
+ dbos/_recovery.py,sha256=K-wlFhdf4yGRm6cUzyhcTjQUS0xp2T5rdNMLiiBErYg,2882
32
+ dbos/_registrations.py,sha256=bEOntObnWaBylnebr5ZpcX2hk7OVLDd1z4BvW4_y3zA,7380
33
+ dbos/_roles.py,sha256=kCuhhg8XLtrHCgKgm44I0abIRTGHltf88OwjEKAUggk,2317
34
+ dbos/_scheduler.py,sha256=CWeGVfl9h51VXfxt80y5Da_5pE8SPty_AYkfpJkkMxQ,2117
35
+ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
36
+ dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
37
+ dbos/_schemas/system_database.py,sha256=-dAKk-_Y3vzbpLT4ei-sIrBQgFyQiwPj1enZb1TYc8I,4943
38
+ dbos/_serialization.py,sha256=GLgWLtHpvk7nSHyXukVQLE1ASNA3CJBtfF8w6iflBDw,3590
39
+ dbos/_sys_db.py,sha256=SspVk-wYmE6xZLuyYQUclwh_AMjnkDXcog5g5WmYn7c,83036
40
+ dbos/_sys_db_postgres.py,sha256=CcvxWzoByEvCZ2P_P-KNBRcyJ_8vSpCjtHBRmc7l5hI,7324
41
+ dbos/_sys_db_sqlite.py,sha256=xT9l-czMhLmfuu5UcnBzAyUxSFgzt3XtEWx9t_D8mZs,7361
42
+ dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
43
+ dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
44
+ dbos/_templates/dbos-db-starter/__package/main.py.dbos,sha256=aQnBPSSQpkB8ERfhf7gB7P9tsU6OPKhZscfeh0yiaD8,2702
45
+ dbos/_templates/dbos-db-starter/__package/schema.py,sha256=7Z27JGC8yy7Z44cbVXIREYxtUhU4JVkLCp5Q7UahVQ0,260
46
+ dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos,sha256=0wPktElM7kMB3OPHTXw4xBk9bgGKMqOHrrr7x_R23Z8,446
47
+ dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos,sha256=pVm2Q0AsxS8pg85llbrXFD6jMccMqGjhGRjTEvS-hXk,942
48
+ dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
49
+ dbos/_tracer.py,sha256=PHbD7iTEkHk7z4B9hc-wPgi2dPTeI1rhZgLI33TQEeM,3786
50
+ dbos/_utils.py,sha256=ZdoM1MDbHnlJrh31zfhp3iX62bAxK1kyvMwXnltC_84,1779
51
+ dbos/_workflow_commands.py,sha256=k-i1bCfNrux43BHLT8wQ-l-MVZX3D6LGZLH7-uuiDRo,4951
52
+ dbos/cli/_github_init.py,sha256=R_94Fnn40CAmPy-zM00lwHi0ndyfv57TmIooADjmag4,3378
53
+ dbos/cli/_template_init.py,sha256=AltKk256VocgvxLpuTxpjJyACrdHFjbGoqYhHzeLae4,2649
54
+ dbos/cli/cli.py,sha256=8fn8hseZWWseJiJMo21_mWYfMqgM2y7l_3UbMP0YNMI,26724
55
+ dbos/cli/migration.py,sha256=vaYxHy0k5KgEuoOQUl6R9oxKv4V5nKKpaVhRbkLDXpo,3440
56
+ dbos/dbos-config.schema.json,sha256=LyUT1DOTaAwOP6suxQGS5KemVIqXGPyu_q7Hbo0neA8,6192
57
+ dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
58
+ version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
59
+ dbos-1.15.0a2.dist-info/RECORD,,
@@ -1,62 +0,0 @@
1
- from alembic import context
2
- from sqlalchemy import engine_from_config, pool
3
-
4
- # this is the Alembic Config object, which provides
5
- # access to the values within the .ini file in use.
6
- config = context.config
7
-
8
- # add your model's MetaData object here
9
- # for 'autogenerate' support
10
- target_metadata = None
11
-
12
-
13
- def run_migrations_offline() -> None:
14
- """
15
- Run migrations in 'offline' mode.
16
-
17
- This configures the context with just a URL
18
- and not an Engine, though an Engine is acceptable
19
- here as well. By skipping the Engine creation
20
- we don't even need a DBAPI to be available.
21
-
22
- Calls to context.execute() here emit the given string to the
23
- script output.
24
- """
25
-
26
- url = config.get_main_option("sqlalchemy.url")
27
- context.configure(
28
- url=url,
29
- target_metadata=target_metadata,
30
- literal_binds=True,
31
- dialect_opts={"paramstyle": "named"},
32
- )
33
-
34
- with context.begin_transaction():
35
- context.run_migrations()
36
-
37
-
38
- def run_migrations_online() -> None:
39
- """
40
- Run migrations in 'online' mode.
41
-
42
- In this scenario we need to create an Engine
43
- and associate a connection with the context.
44
- """
45
-
46
- connectable = engine_from_config(
47
- config.get_section(config.config_ini_section, {}),
48
- prefix="sqlalchemy.",
49
- poolclass=pool.NullPool,
50
- )
51
-
52
- with connectable.connect() as connection:
53
- context.configure(connection=connection, target_metadata=target_metadata)
54
-
55
- with context.begin_transaction():
56
- context.run_migrations()
57
-
58
-
59
- if context.is_offline_mode():
60
- run_migrations_offline()
61
- else:
62
- run_migrations_online()
@@ -1,26 +0,0 @@
1
- """${message}
2
-
3
- Revision ID: ${up_revision}
4
- Revises: ${down_revision | comma,n}
5
- Create Date: ${create_date}
6
-
7
- """
8
- from typing import Sequence, Union
9
-
10
- from alembic import op
11
- import sqlalchemy as sa
12
- ${imports if imports else ""}
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = ${repr(up_revision)}
16
- down_revision: Union[str, None] = ${repr(down_revision)}
17
- branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
18
- depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
19
-
20
-
21
- def upgrade() -> None:
22
- ${upgrades if upgrades else "pass"}
23
-
24
-
25
- def downgrade() -> None:
26
- ${downgrades if downgrades else "pass"}
@@ -1,42 +0,0 @@
1
- """streaming
2
-
3
- Revision ID: 01ce9f07bd10
4
- Revises: d994145b47b6
5
- Create Date: 2025-08-05 10:20:46.424975
6
-
7
- """
8
-
9
- from typing import Sequence, Union
10
-
11
- import sqlalchemy as sa
12
- from alembic import op
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = "01ce9f07bd10"
16
- down_revision: Union[str, None] = "d994145b47b6"
17
- branch_labels: Union[str, Sequence[str], None] = None
18
- depends_on: Union[str, Sequence[str], None] = None
19
-
20
-
21
- def upgrade() -> None:
22
- # Create streams table
23
- op.create_table(
24
- "streams",
25
- sa.Column("workflow_uuid", sa.Text(), nullable=False),
26
- sa.Column("key", sa.Text(), nullable=False),
27
- sa.Column("value", sa.Text(), nullable=False),
28
- sa.Column("offset", sa.Integer(), nullable=False),
29
- sa.ForeignKeyConstraint(
30
- ["workflow_uuid"],
31
- ["dbos.workflow_status.workflow_uuid"],
32
- onupdate="CASCADE",
33
- ondelete="CASCADE",
34
- ),
35
- sa.PrimaryKeyConstraint("workflow_uuid", "key", "offset"),
36
- schema="dbos",
37
- )
38
-
39
-
40
- def downgrade() -> None:
41
- # Drop streams table
42
- op.drop_table("streams", schema="dbos")
@@ -1,34 +0,0 @@
1
- """workflow_queues_executor_id
2
-
3
- Revision ID: 04ca4f231047
4
- Revises: d76646551a6c
5
- Create Date: 2025-01-15 15:05:08.043190
6
-
7
- """
8
-
9
- from typing import Sequence, Union
10
-
11
- import sqlalchemy as sa
12
- from alembic import op
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = "04ca4f231047"
16
- down_revision: Union[str, None] = "d76646551a6c"
17
- branch_labels: Union[str, Sequence[str], None] = None
18
- depends_on: Union[str, Sequence[str], None] = None
19
-
20
-
21
- def upgrade() -> None:
22
- op.add_column(
23
- "workflow_queue",
24
- sa.Column(
25
- "executor_id",
26
- sa.Text(),
27
- nullable=True,
28
- ),
29
- schema="dbos",
30
- )
31
-
32
-
33
- def downgrade() -> None:
34
- op.drop_column("workflow_queue", "executor_id", schema="dbos")
@@ -1,45 +0,0 @@
1
- """add queue dedup
2
-
3
- Revision ID: 27ac6900c6ad
4
- Revises: 83f3732ae8e7
5
- Create Date: 2025-04-23 16:18:48.530047
6
-
7
- """
8
-
9
- from typing import Sequence, Union
10
-
11
- import sqlalchemy as sa
12
- from alembic import op
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = "27ac6900c6ad"
16
- down_revision: Union[str, None] = "83f3732ae8e7"
17
- branch_labels: Union[str, Sequence[str], None] = None
18
- depends_on: Union[str, Sequence[str], None] = None
19
-
20
-
21
- def upgrade() -> None:
22
- op.add_column(
23
- "workflow_queue",
24
- sa.Column(
25
- "deduplication_id",
26
- sa.Text(),
27
- nullable=True,
28
- ),
29
- schema="dbos",
30
- )
31
-
32
- # Unique constraint for queue_name, deduplication_id
33
- op.create_unique_constraint(
34
- "uq_workflow_queue_name_dedup_id",
35
- "workflow_queue",
36
- ["queue_name", "deduplication_id"],
37
- schema="dbos",
38
- )
39
-
40
-
41
- def downgrade() -> None:
42
- op.drop_constraint(
43
- "uq_workflow_queue_name_dedup_id", "workflow_queue", schema="dbos"
44
- )
45
- op.drop_column("workflow_queue", "deduplication_id", schema="dbos")
@@ -1,35 +0,0 @@
1
- """dbos_migrations
2
-
3
- Revision ID: 471b60d64126
4
- Revises: 01ce9f07bd10
5
- Create Date: 2025-08-21 14:22:31.455266
6
-
7
- """
8
-
9
- from typing import Sequence, Union
10
-
11
- import sqlalchemy as sa
12
- from alembic import op
13
-
14
- # revision identifiers, used by Alembic.
15
- revision: str = "471b60d64126"
16
- down_revision: Union[str, None] = "01ce9f07bd10"
17
- branch_labels: Union[str, Sequence[str], None] = None
18
- depends_on: Union[str, Sequence[str], None] = None
19
-
20
-
21
- def upgrade() -> None:
22
- # Create dbos_migrations table
23
- op.create_table(
24
- "dbos_migrations",
25
- sa.Column("version", sa.BigInteger(), nullable=False),
26
- sa.PrimaryKeyConstraint("version"),
27
- schema="dbos",
28
- )
29
-
30
- # Insert initial version 1
31
- op.execute("INSERT INTO dbos.dbos_migrations (version) VALUES (1)")
32
-
33
-
34
- def downgrade() -> None:
35
- op.drop_table("dbos_migrations", schema="dbos")