dbos 0.19.0a4__py3-none-any.whl → 0.20.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
dbos/cli/cli.py ADDED
@@ -0,0 +1,367 @@
+ import os
+ import platform
+ import signal
+ import subprocess
+ import time
+ import typing
+ from os import path
+ from typing import Any
+
+ import jsonpickle # type: ignore
+ import requests
+ import sqlalchemy as sa
+ import typer
+ from rich import print
+ from rich.prompt import IntPrompt
+ from typing_extensions import Annotated
+
+ from .. import load_config
+ from .._app_db import ApplicationDatabase
+ from .._dbos_config import _is_valid_app_name
+ from .._sys_db import SystemDatabase, reset_system_database
+ from .._workflow_commands import _cancel_workflow, _get_workflow, _list_workflows
+ from ..cli._github_init import create_template_from_github
+ from ._template_init import copy_template, get_project_name, get_templates_directory
+
+ app = typer.Typer()
+ workflow = typer.Typer()
+
+ app.add_typer(workflow, name="workflow", help="Manage DBOS workflows")
+
+
+ def _on_windows() -> bool:
+     return platform.system() == "Windows"
+
+
+ @app.command(
+     help="Start your DBOS application using the start commands in 'dbos-config.yaml'"
+ )
+ def start() -> None:
+     config = load_config()
+     start_commands = config["runtimeConfig"]["start"]
+     typer.echo("Executing start commands from 'dbos-config.yaml'")
+     for command in start_commands:
+         typer.echo(f"Executing: {command}")
+
+         # Run the command in the child process.
+         # On Unix-like systems, set its process group
+         process = subprocess.Popen(
+             command,
+             shell=True,
+             text=True,
+             preexec_fn=os.setsid if not _on_windows() else None,
+         )
+
+         def signal_handler(signum: int, frame: Any) -> None:
+             """
+             Forward kill signals to children.
+
+             When we receive a signal, send it to the entire process group of the child.
+             If that doesn't work, SIGKILL them then exit.
+             """
+             # Send the signal to the child's entire process group
+             if process.poll() is None:
+                 os.killpg(os.getpgid(process.pid), signum)
+
+             # Give some time for the child to terminate
+             for _ in range(10): # Wait up to 1 second
+                 if process.poll() is not None:
+                     break
+                 time.sleep(0.1)
+
+             # If the child is still running, force kill it
+             if process.poll() is None:
+                 os.killpg(os.getpgid(process.pid), signal.SIGKILL)
+
+             # Exit immediately
+             os._exit(process.returncode if process.returncode is not None else 1)
+
+         # Configure the single handler only on Unix-like systems.
+         # TODO: Also kill the children on Windows.
+         if not _on_windows():
+             signal.signal(signal.SIGINT, signal_handler)
+             signal.signal(signal.SIGTERM, signal_handler)
+         process.wait()
+
+
+ @app.command(help="Initialize a new DBOS application from a template")
+ def init(
+     project_name: Annotated[
+         typing.Optional[str], typer.Argument(help="Specify application name")
+     ] = None,
+     template: Annotated[
+         typing.Optional[str],
+         typer.Option("--template", "-t", help="Specify template to use"),
+     ] = None,
+     config: Annotated[
+         bool,
+         typer.Option("--config", "-c", help="Only add dbos-config.yaml"),
+     ] = False,
+ ) -> None:
+     try:
+
+         git_templates = ["dbos-toolbox", "dbos-app-starter", "dbos-cron-starter"]
+         templates_dir = get_templates_directory()
+         templates = git_templates + [
+             x.name for x in os.scandir(templates_dir) if x.is_dir()
+         ]
+
+         if config and template is None:
+             template = templates[-1]
+
+         if template:
+             if template not in templates:
+                 raise Exception(f"Template {template} not found in {templates_dir}")
+         else:
+             print("\n[bold]Available templates:[/bold]")
+             for idx, template_name in enumerate(templates, 1):
+                 print(f" {idx}. {template_name}")
+             while True:
+                 try:
+                     choice = IntPrompt.ask(
+                         "\nSelect template number",
+                         show_choices=False,
+                         show_default=False,
+                     )
+                     if 1 <= choice <= len(templates):
+                         template = templates[choice - 1]
+                         break
+                     else:
+                         print(
+                             "[red]Invalid selection. Please choose a number from the list.[/red]"
+                         )
+                 except (KeyboardInterrupt, EOFError):
+                     raise typer.Abort()
+                 except ValueError:
+                     print("[red]Please enter a valid number.[/red]")
+
+         if template in git_templates:
+             project_name = template
+         else:
+             if project_name is None:
+                 project_name = typing.cast(
+                     str,
+                     typer.prompt("What is your project's name?", get_project_name()),
+                 )
+
+         if not _is_valid_app_name(project_name):
+             raise Exception(
+                 f"{project_name} is an invalid DBOS app name. App names must be between 3 and 30 characters long and contain only lowercase letters, numbers, dashes, and underscores."
+             )
+
+         if template in git_templates:
+             create_template_from_github(app_name=project_name, template_name=template)
+         else:
+             copy_template(
+                 path.join(templates_dir, template), project_name, config_mode=config
+             )
+     except Exception as e:
+         print(f"[red]{e}[/red]")
+
+
+ @app.command(
+     help="Run your database schema migrations using the migration commands in 'dbos-config.yaml'"
+ )
+ def migrate() -> None:
+     config = load_config()
+     if not config["database"]["password"]:
+         typer.echo(
+             "DBOS configuration does not contain database password, please check your config file and retry!"
+         )
+         raise typer.Exit(code=1)
+     app_db_name = config["database"]["app_db_name"]
+
+     typer.echo(f"Starting schema migration for database {app_db_name}")
+
+     # First, run DBOS migrations on the system database and the application database
+     app_db = None
+     sys_db = None
+     try:
+         sys_db = SystemDatabase(config)
+         app_db = ApplicationDatabase(config)
+     except Exception as e:
+         typer.echo(f"DBOS system schema migration failed: {e}")
+     finally:
+         if sys_db:
+             sys_db.destroy()
+         if app_db:
+             app_db.destroy()
+
+     # Next, run any custom migration commands specified in the configuration
+     typer.echo("Executing migration commands from 'dbos-config.yaml'")
+     try:
+         migrate_commands = (
+             config["database"]["migrate"]
+             if "migrate" in config["database"] and config["database"]["migrate"]
+             else []
+         )
+         for command in migrate_commands:
+             typer.echo(f"Executing migration command: {command}")
+             result = subprocess.run(command, shell=True, text=True)
+             if result.returncode != 0:
+                 typer.echo(f"Migration command failed: {command}")
+                 typer.echo(result.stderr)
+                 raise typer.Exit(1)
+             if result.stdout:
+                 typer.echo(result.stdout.rstrip())
+     except Exception as e:
+         typer.echo(f"An error occurred during schema migration: {e}")
+         raise typer.Exit(code=1)
+
+     typer.echo(f"Completed schema migration for database {app_db_name}")
+
+
+ @app.command(help="Reset the DBOS system database")
+ def reset(
+     yes: bool = typer.Option(False, "-y", "--yes", help="Skip confirmation prompt")
+ ) -> None:
+     if not yes:
+         confirm = typer.confirm(
+             "This command resets your DBOS system database, deleting metadata about past workflows and steps. Are you sure you want to proceed?"
+         )
+         if not confirm:
+             typer.echo("Operation cancelled.")
+             raise typer.Exit()
+     config = load_config()
+     try:
+         reset_system_database(config)
+     except sa.exc.SQLAlchemyError as e:
+         typer.echo(f"Error resetting system database: {str(e)}")
+         return
+
+
+ @workflow.command(help="List workflows for your application")
+ def list(
+     limit: Annotated[
+         int,
+         typer.Option("--limit", "-l", help="Limit the results returned"),
+     ] = 10,
+     user: Annotated[
+         typing.Optional[str],
+         typer.Option("--user", "-u", help="Retrieve workflows run by this user"),
+     ] = None,
+     starttime: Annotated[
+         typing.Optional[str],
+         typer.Option(
+             "--start-time",
+             "-s",
+             help="Retrieve workflows starting after this timestamp (ISO 8601 format)",
+         ),
+     ] = None,
+     endtime: Annotated[
+         typing.Optional[str],
+         typer.Option(
+             "--end-time",
+             "-e",
+             help="Retrieve workflows starting before this timestamp (ISO 8601 format)",
+         ),
+     ] = None,
+     status: Annotated[
+         typing.Optional[str],
+         typer.Option(
+             "--status",
+             "-S",
+             help="Retrieve workflows with this status (PENDING, SUCCESS, ERROR, RETRIES_EXCEEDED, ENQUEUED, or CANCELLED)",
+         ),
+     ] = None,
+     appversion: Annotated[
+         typing.Optional[str],
+         typer.Option(
+             "--application-version",
+             "-v",
+             help="Retrieve workflows with this application version",
+         ),
+     ] = None,
+     request: Annotated[
+         bool,
+         typer.Option("--request", help="Retrieve workflow request information"),
+     ] = True,
+     appdir: Annotated[
+         typing.Optional[str],
+         typer.Option("--app-dir", "-d", help="Specify the application root directory"),
+     ] = None,
+ ) -> None:
+     config = load_config()
+     workflows = _list_workflows(
+         config, limit, user, starttime, endtime, status, request, appversion
+     )
+     print(jsonpickle.encode(workflows, unpicklable=False))
+
+
+ @workflow.command(help="Retrieve the status of a workflow")
+ def get(
+     uuid: Annotated[str, typer.Argument()],
+     appdir: Annotated[
+         typing.Optional[str],
+         typer.Option("--app-dir", "-d", help="Specify the application root directory"),
+     ] = None,
+     request: Annotated[
+         bool,
+         typer.Option("--request", help="Retrieve workflow request information"),
+     ] = True,
+ ) -> None:
+     config = load_config()
+     print(jsonpickle.encode(_get_workflow(config, uuid, request), unpicklable=False))
+
+
+ @workflow.command(
+     help="Cancel a workflow so it is no longer automatically retried or restarted"
+ )
+ def cancel(
+     uuid: Annotated[str, typer.Argument()],
+     appdir: Annotated[
+         typing.Optional[str],
+         typer.Option("--app-dir", "-d", help="Specify the application root directory"),
+     ] = None,
+ ) -> None:
+     config = load_config()
+     _cancel_workflow(config, uuid)
+     print(f"Workflow {uuid} has been cancelled")
+
+
+ @workflow.command(help="Resume a workflow that has been cancelled")
+ def resume(
+     uuid: Annotated[str, typer.Argument()],
+     host: Annotated[
+         typing.Optional[str],
+         typer.Option("--host", "-h", help="Specify the admin host"),
+     ] = "localhost",
+     port: Annotated[
+         typing.Optional[int],
+         typer.Option("--port", "-p", help="Specify the admin port"),
+     ] = 3001,
+ ) -> None:
+     response = requests.post(
+         f"http://{host}:{port}/workflows/{uuid}/resume", json=[], timeout=5
+     )
+
+     if response.status_code == 200:
+         print(f"Workflow {uuid} has been resumed")
+     else:
+         print(f"Failed to resume workflow {uuid}. Status code: {response.status_code}")
+
+
+ @workflow.command(help="Restart a workflow from the beginning with a new id")
+ def restart(
+     uuid: Annotated[str, typer.Argument()],
+     host: Annotated[
+         typing.Optional[str],
+         typer.Option("--host", "-h", help="Specify the admin host"),
+     ] = "localhost",
+     port: Annotated[
+         typing.Optional[int],
+         typer.Option("--port", "-p", help="Specify the admin port"),
+     ] = 3001,
+ ) -> None:
+     response = requests.post(
+         f"http://{host}:{port}/workflows/{uuid}/restart", json=[], timeout=5
+     )
+
+     if response.status_code == 200:
+         print(f"Workflow {uuid} has been restarted")
+     else:
+         print(f"Failed to restart workflow {uuid}. Status code: {response.status_code}")
+
+
+ if __name__ == "__main__":
+     app()
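
The file above wires the new `workflow` sub-commands (`list`, `get`, `cancel`, `resume`, `restart`) into the `dbos` Typer app. As a rough sketch only (not part of the wheel), the new commands could be exercised in-process with Typer's standard test runner instead of a shell; this assumes a reachable Postgres and a valid `dbos-config.yaml` in the working directory:

```python
# Hypothetical sketch: drive the new CLI through Typer's CliRunner.
from typer.testing import CliRunner

from dbos.cli.cli import app  # the Typer app defined in the file above

runner = CliRunner()

# Equivalent to running `dbos workflow list --limit 5` from the shell.
result = runner.invoke(app, ["workflow", "list", "--limit", "5"])
print(result.exit_code)  # 0 on success
print(result.output)     # JSON-encoded workflow list from _list_workflows
```
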
dbos-0.19.0a4.dist-info/METADATA → dbos-0.20.0.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dbos
- Version: 0.19.0a4
+ Version: 0.20.0
  Summary: Ultra-lightweight durable execution in Python
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
  License: MIT
@@ -28,14 +28,14 @@ Description-Content-Type: text/markdown
 
  <div align="center">
 
- # DBOS Transact: Ultra-Lightweight Durable Execution
+ # DBOS Transact: A Lightweight Durable Execution Library Built on Postgres
 
  #### [Documentation](https://docs.dbos.dev/) &nbsp;&nbsp;•&nbsp;&nbsp; [Examples](https://docs.dbos.dev/examples) &nbsp;&nbsp;•&nbsp;&nbsp; [Github](https://github.com/dbos-inc) &nbsp;&nbsp;•&nbsp;&nbsp; [Discord](https://discord.com/invite/jsmC6pXGgX)
  </div>
 
  ---
 
- DBOS Transact is a Python library providing **ultra-lightweight durable execution**.
+ DBOS Transact is a Python library for **ultra-lightweight durable execution**.
  For example:
 
  ```python
@@ -55,18 +55,23 @@ def workflow()
 
  Durable execution means your program is **resilient to any failure**.
  If it is ever interrupted or crashes, all your workflows will automatically resume from the last completed step.
- If you want to see durable execution in action, check out [this demo app](https://demo-widget-store.cloud.dbos.dev/) (source code [here](https://github.com/dbos-inc/dbos-demo-apps/tree/main/python/widget-store)).
- No matter how many times you try to crash it, it always resumes from exactly where it left off!
+ Durable execution helps solve many common problems:
 
- Under the hood, DBOS Transact works by storing your program's execution state (which workflows are currently executing and which steps they've completed) in a Postgres database.
- So all you need to use it is a Postgres database to connect to&mdash;there's no need for a "workflow server."
- This approach is also incredibly fast, for example [25x faster than AWS Step Functions](https://www.dbos.dev/blog/dbos-vs-aws-step-functions-benchmark).
+ - Orchestrating long-running or business-critical workflows so they seamlessly recover from any failure.
+ - Running reliable background jobs with no timeouts.
+ - Processing incoming events (e.g. from Kafka) exactly once.
+ - Running a fault-tolerant distributed task queue.
+ - Running a reliable cron scheduler.
+ - Operating an AI agent, or anything that connects to an unreliable or non-deterministic API.
 
- Some more cool features include:
+ What’s unique about DBOS's implementation of durable execution is that it’s implemented in a **lightweight library** that’s **totally backed by Postgres**.
+ To use DBOS, just `pip install` it and annotate your program with DBOS decorators.
+ Under the hood, those decorators store your program's execution state (which workflows are currently executing and which steps they've completed) in a Postgres database.
+ If your program crashes or is interrupted, they automatically recover its workflows from their stored state.
+ So all you need to use DBOS is Postgres&mdash;there are no other dependencies you have to manage, no separate workflow server.
 
- - Scheduled jobs&mdash;run your workflows exactly-once per time interval.
- - Exactly-once event processing&mdash;use workflows to process incoming events (for example, from a Kafka topic) exactly-once.
- - Observability&mdash;all workflows automatically emit [OpenTelemetry](https://opentelemetry.io/) traces.
+ One big advantage of this approach is that you can add DBOS to **any** Python application&mdash;**it’s just a library**.
+ You can use DBOS to add reliable background jobs or cron scheduling or queues to your app with no external dependencies except Postgres.
 
  ## Getting Started
 
@@ -77,7 +82,7 @@ pip install dbos
  dbos init --config
  ```
 
- Then, try it out with this simple program (requires Postgres):
+ Then, try it out with this simple program:
 
  ```python
  from fastapi import FastAPI
@@ -107,14 +112,14 @@ def fastapi_endpoint():
  dbos_workflow()
  ```
 
- Save the program into `main.py`, edit `dbos-config.yaml` to configure your Postgres connection settings, and start it with `fastapi run`.
+ Save the program into `main.py` and start it with `fastapi run`.
  Visit `localhost:8000` in your browser to start the workflow.
  When prompted, press `Control + \` to force quit your application.
  It should crash midway through the workflow, having completed step one but not step two.
  Then, restart your app with `fastapi run`.
  It should resume the workflow from where it left off, completing step two without re-executing step one.
 
- To learn how to build more complex workflows, see our [programming guide](https://docs.dbos.dev/python/programming-guide) or [examples](https://docs.dbos.dev/examples).
+ To learn how to build more complex workflows, see the [programming guide](https://docs.dbos.dev/python/programming-guide) or [examples](https://docs.dbos.dev/examples).
 
  ## Documentation
 
@@ -125,7 +130,7 @@ To learn how to build more complex workflows, see our [programming guide](https:
 
  - [**AI-Powered Slackbot**](https://docs.dbos.dev/python/examples/rag-slackbot) &mdash; A Slackbot that answers questions about previous Slack conversations, using DBOS to durably orchestrate its RAG pipeline.
  - [**Widget Store**](https://docs.dbos.dev/python/examples/widget-store) &mdash; An online storefront that uses DBOS durable workflows to be resilient to any failure.
- - [**Earthquake Tracker**](https://docs.dbos.dev/python/examples/earthquake-tracker) &mdash; A real-time earthquake dashboard that uses DBOS to stream data from the USGS into Postgres, then visualizes it with Streamlit.
+ - [**Scheduled Reminders**](https://docs.dbos.dev/python/examples/scheduled-reminders) &mdash; In just three lines of code, schedule an email to send days, weeks, or months in the future.
 
  More examples [here](https://docs.dbos.dev/examples)!
 
dbos-0.19.0a4.dist-info/RECORD → dbos-0.20.0.dist-info/RECORD
@@ -1,24 +1,24 @@
- dbos-0.19.0a4.dist-info/METADATA,sha256=NOwdv7iSopa_WvJkvQ3-AJ-peRmVMFW3E5D7SfxERqI,5144
- dbos-0.19.0a4.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
- dbos-0.19.0a4.dist-info/entry_points.txt,sha256=z6GcVANQV7Uw_82H9Ob2axJX6V3imftyZsljdh-M1HU,54
- dbos-0.19.0a4.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
+ dbos-0.20.0.dist-info/METADATA,sha256=4gV-eeocBCalrCoYpN8ryaGJNHvu4clGO7mZfGD-Cco,5307
+ dbos-0.20.0.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+ dbos-0.20.0.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+ dbos-0.20.0.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
  dbos/__init__.py,sha256=CxRHBHEthPL4PZoLbZhp3rdm44-KkRTT2-7DkK9d4QQ,724
- dbos/_admin_server.py,sha256=DOgzVp9kmwiebQqmJB1LcrZnGTxSMbZiGXdenc1wZDg,3163
+ dbos/_admin_server.py,sha256=PJgneZG9-64TapZrPeJtt73puAswRImCE5uce2k2PKU,4750
  dbos/_app_db.py,sha256=_tv2vmPjjiaikwgxH3mqxgJ4nUUcG2-0uMXKWCqVu1c,5509
  dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
  dbos/_cloudutils/authentication.py,sha256=V0fCWQN9stCkhbuuxgPTGpvuQcDqfU3KAxPAh01vKW4,5007
  dbos/_cloudutils/cloudutils.py,sha256=5e3CW1deSW-dI5G3QN0XbiVsBhyqT8wu7fuV2f8wtGU,7688
  dbos/_cloudutils/databases.py,sha256=x4187Djsyoa-QaG3Kog8JT2_GERsnqa93LIVanmVUmg,8393
- dbos/_context.py,sha256=KV3fd3-Rv6EWrYDUdHARxltSlNZGNtQtNSqeQ-gkXE8,18049
- dbos/_core.py,sha256=dbG8573iSzB_WITWOh6yOV-w32BM8UbJcOB4Fr0e-lw,34456
+ dbos/_context.py,sha256=RH08s_nee95vgxdz6AsYuVWF1LuJSVtOyIifblsa4pw,18760
+ dbos/_core.py,sha256=-2oh2-NicMJBwTwrd2EQBQm4Vu0caozFeoS9Kj47DzM,36588
  dbos/_croniter.py,sha256=hbhgfsHBqclUS8VeLnJ9PSE9Z54z6mi4nnrr1aUXn0k,47561
  dbos/_db_wizard.py,sha256=xgKLna0_6Xi50F3o8msRosXba8NScHlpJR5ICVCkHDQ,7534
- dbos/_dbos.py,sha256=LWFa48CPt7bsNAnMZrNDzHHTFCyMrY-nKbMZwCG_dqY,34710
+ dbos/_dbos.py,sha256=1PG142hzPBFguAbuBXaKS-YwzRdaIUW8087JCi78RmU,36193
  dbos/_dbos_config.py,sha256=h_q1gzudhsAMVkGMD0qQ6kLic6YhdJgzm50YFSIx9Bo,8196
- dbos/_error.py,sha256=UETk8CoZL-TO2Utn1-E7OSWelhShWmKM-fOlODMR9PE,3893
- dbos/_fastapi.py,sha256=iyefCZq-ZDKRUjN_rgYQmFmyvWf4gPrSlC6CLbfq4a8,3419
- dbos/_flask.py,sha256=z1cijbTi5Dpq6kqikPCx1LcR2YHHv2oc41NehOWjw74,2431
- dbos/_kafka.py,sha256=OmOKfO7_3Z2FUFv_sJaIfebd7xnqtuRRndzNTTufgb8,3654
+ dbos/_error.py,sha256=vtaSsG0QW6cRlwfZ4zzZWy_IHCZlomwSlrDyGWuyn8c,4337
+ dbos/_fastapi.py,sha256=ke03vqsSYDnO6XeOtOVFXj0-f-v1MGsOxa9McaROvNc,3616
+ dbos/_flask.py,sha256=DZKUZR5-xOzPI7tYZ53r2PvvHVoAb8SYwLzMVFsVfjI,2608
+ dbos/_kafka.py,sha256=o6DbwnsYRDtvVTZVsN7BAK8cdP79AfoWX3Q7CGY2Yuo,4199
  dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
  dbos/_logger.py,sha256=iYwbA7DLyXalWa2Yu07HO6Xm301nRuenMU64GgwUMkU,3576
  dbos/_migrations/env.py,sha256=38SIGVbmn_VV2x2u1aHLcPOoWgZ84eCymf3g_NljmbU,1626
@@ -31,7 +31,7 @@ dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py,sha256=8PyFi8rd6CN-m
  dbos/_migrations/versions/d76646551a6c_workflow_queue.py,sha256=G942nophZ2uC2vc4hGBC02Ptng1715roTjY3xiyzZU4,729
  dbos/_migrations/versions/eab0cc1d9a14_job_queue.py,sha256=uvhFOtqbBreCePhAxZfIT0qCAI7BiZTou9wt6QnbY7c,1412
  dbos/_outcome.py,sha256=FDMgWVjZ06vm9xO-38H17mTqBImUYQxgKs_bDCSIAhE,6648
- dbos/_queue.py,sha256=VSaF-BTv2tm-44O_690omo0pE31NQAhOT3ARL4VLRzY,2723
+ dbos/_queue.py,sha256=o_aczwualJTMoXb0XXL-Y5QH77OEukWzuerogbWi2ho,2779
  dbos/_recovery.py,sha256=jbzGYxICA2drzyzlBSy2UiXhKV_16tBVacKQdTkqf-w,2008
  dbos/_registrations.py,sha256=mei6q6_3R5uei8i_Wo_TqGZs85s10shOekDX41sFYD0,6642
  dbos/_request.py,sha256=cX1B3Atlh160phgS35gF1VEEV4pD126c9F3BDgBmxZU,929
@@ -41,20 +41,23 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dbos/_schemas/application_database.py,sha256=KeyoPrF7hy_ODXV7QNike_VFSD74QBRfQ76D7QyE9HI,966
  dbos/_schemas/system_database.py,sha256=rwp4EvCSaXcUoMaRczZCvETCxGp72k3-hvLyGUDkih0,5163
  dbos/_serialization.py,sha256=YCYv0qKAwAZ1djZisBC7khvKqG-5OcIv9t9EC5PFIog,1743
- dbos/_sys_db.py,sha256=2W3ta0Q-isESMjyGbXCPfaoll-vyPQg1innBEeNfg2c,50088
- dbos/_templates/hello/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
- dbos/_templates/hello/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- dbos/_templates/hello/__package/main.py,sha256=eI0SS9Nwj-fldtiuSzIlIG6dC91GXXwdRsoHxv6S_WI,2719
- dbos/_templates/hello/__package/schema.py,sha256=7Z27JGC8yy7Z44cbVXIREYxtUhU4JVkLCp5Q7UahVQ0,260
- dbos/_templates/hello/alembic.ini,sha256=VKBn4Gy8mMuCdY7Hip1jmo3wEUJ1VG1aW7EqY0_n-as,3695
- dbos/_templates/hello/dbos-config.yaml.dbos,sha256=OMlcpdYUJKjyAme7phOz3pbn9upcIRjm42iwEThWUEQ,495
- dbos/_templates/hello/migrations/env.py.dbos,sha256=GUV6sjkDzf9Vl6wkGEd0RSkK-ftRfV6EUwSQdd0qFXg,2392
- dbos/_templates/hello/migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
- dbos/_templates/hello/migrations/versions/2024_07_31_180642_init.py,sha256=U5thFWGqNN4QLrNXT7wUUqftIFDNE5eSdqD8JNW1mec,942
- dbos/_templates/hello/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
+ dbos/_sys_db.py,sha256=eXFXzmw_bq5Qp3s2_OzjkQKQj9HxMbP4AyJ2VQnJ08g,53786
+ dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
+ dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ dbos/_templates/dbos-db-starter/__package/main.py,sha256=eI0SS9Nwj-fldtiuSzIlIG6dC91GXXwdRsoHxv6S_WI,2719
+ dbos/_templates/dbos-db-starter/__package/schema.py,sha256=7Z27JGC8yy7Z44cbVXIREYxtUhU4JVkLCp5Q7UahVQ0,260
+ dbos/_templates/dbos-db-starter/alembic.ini,sha256=VKBn4Gy8mMuCdY7Hip1jmo3wEUJ1VG1aW7EqY0_n-as,3695
+ dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos,sha256=OMlcpdYUJKjyAme7phOz3pbn9upcIRjm42iwEThWUEQ,495
+ dbos/_templates/dbos-db-starter/migrations/env.py.dbos,sha256=GUV6sjkDzf9Vl6wkGEd0RSkK-ftRfV6EUwSQdd0qFXg,2392
+ dbos/_templates/dbos-db-starter/migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
+ dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sha256=U5thFWGqNN4QLrNXT7wUUqftIFDNE5eSdqD8JNW1mec,942
+ dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
  dbos/_tracer.py,sha256=rvBY1RQU6DO7rL7EnaJJxGcmd4tP_PpGqUEE6imZnhY,2518
- dbos/cli.py,sha256=em1uAxrp5yyg53V7ZpmHFtqD6OJp2cMJkG9vGJPoFTA,10904
+ dbos/_workflow_commands.py,sha256=tj-gJARjDJ5aYo0ii2udTAU4l36vbeXwmOYh8Q4y_ac,4625
+ dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
+ dbos/cli/_template_init.py,sha256=AfuMaO8bmr9WsPNHr6j2cp7kjVVZDUpH7KpbTg0hhFs,2722
+ dbos/cli/cli.py,sha256=07TXdfDhImEOjB2-yhWJc1CK07_CSF-xF7TYCtB1kRY,12410
  dbos/dbos-config.schema.json,sha256=X5TpXNcARGceX0zQs0fVgtZW_Xj9uBbY5afPt9Rz9yk,5741
  dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
  version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
- dbos-0.19.0a4.dist-info/RECORD,,
+ dbos-0.20.0.dist-info/RECORD,,
dbos-0.19.0a4.dist-info/entry_points.txt → dbos-0.20.0.dist-info/entry_points.txt
@@ -1,5 +1,5 @@
  [console_scripts]
- dbos = dbos.cli:app
+ dbos = dbos.cli.cli:app
 
  [gui_scripts]
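
The entry-point change reflects the CLI's move from the single module `dbos/cli.py` to the `dbos/cli/` package (see the RECORD diff above). In effect, the installed `dbos` command now resolves as in this small sketch:

```python
# What the updated [console_scripts] entry point does: load the Typer app
# from the new module path and run it.
from dbos.cli.cli import app  # previously: from dbos.cli import app

if __name__ == "__main__":
    app()  # same as invoking `dbos` on the command line
```
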