dbos 0.19.0a9__py3-none-any.whl → 0.20.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbos/_admin_server.py +45 -2
- dbos/_core.py +61 -5
- dbos/_dbos.py +36 -1
- dbos/_fastapi.py +16 -11
- dbos/_flask.py +6 -2
- dbos/_sys_db.py +44 -1
- dbos/_workflow_commands.py +4 -5
- dbos/cli/_github_init.py +107 -0
- dbos/cli/_template_init.py +98 -0
- dbos/{cli.py → cli/cli.py} +104 -173
- {dbos-0.19.0a9.dist-info → dbos-0.20.0.dist-info}/METADATA +1 -1
- {dbos-0.19.0a9.dist-info → dbos-0.20.0.dist-info}/RECORD +25 -23
- {dbos-0.19.0a9.dist-info → dbos-0.20.0.dist-info}/entry_points.txt +1 -1
- /dbos/_templates/{hello → dbos-db-starter}/README.md +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/__package/__init__.py +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/__package/main.py +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/__package/schema.py +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/alembic.ini +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/dbos-config.yaml.dbos +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/migrations/env.py.dbos +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/migrations/script.py.mako +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/migrations/versions/2024_07_31_180642_init.py +0 -0
- /dbos/_templates/{hello → dbos-db-starter}/start_postgres_docker.py +0 -0
- {dbos-0.19.0a9.dist-info → dbos-0.20.0.dist-info}/WHEEL +0 -0
- {dbos-0.19.0a9.dist-info → dbos-0.20.0.dist-info}/licenses/LICENSE +0 -0
dbos/_admin_server.py
CHANGED
```diff
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import json
+import re
 import threading
 from functools import partial
 from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
@@ -15,6 +16,9 @@ if TYPE_CHECKING:
 _health_check_path = "/dbos-healthz"
 _workflow_recovery_path = "/dbos-workflow-recovery"
 _deactivate_path = "/deactivate"
+# /workflows/:workflow_id/cancel
+# /workflows/:workflow_id/resume
+# /workflows/:workflow_id/restart
 
 
 class AdminServer:
@@ -79,12 +83,51 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             self._end_headers()
             self.wfile.write(json.dumps(workflow_ids).encode("utf-8"))
         else:
-
-
+
+            restart_match = re.match(
+                r"^/workflows/(?P<workflow_id>[^/]+)/restart$", self.path
+            )
+            resume_match = re.match(
+                r"^/workflows/(?P<workflow_id>[^/]+)/resume$", self.path
+            )
+            cancel_match = re.match(
+                r"^/workflows/(?P<workflow_id>[^/]+)/cancel$", self.path
+            )
+
+            if restart_match:
+                workflow_id = restart_match.group("workflow_id")
+                self._handle_restart(workflow_id)
+            elif resume_match:
+                workflow_id = resume_match.group("workflow_id")
+                self._handle_resume(workflow_id)
+            elif cancel_match:
+                workflow_id = cancel_match.group("workflow_id")
+                self._handle_cancel(workflow_id)
+            else:
+                self.send_response(404)
+                self._end_headers()
 
     def log_message(self, format: str, *args: Any) -> None:
         return  # Disable admin server request logging
 
+    def _handle_restart(self, workflow_id: str) -> None:
+        self.dbos.restart_workflow(workflow_id)
+        print("Restarting workflow", workflow_id)
+        self.send_response(204)
+        self._end_headers()
+
+    def _handle_resume(self, workflow_id: str) -> None:
+        print("Resuming workflow", workflow_id)
+        self.dbos.resume_workflow(workflow_id)
+        self.send_response(204)
+        self._end_headers()
+
+    def _handle_cancel(self, workflow_id: str) -> None:
+        print("Cancelling workflow", workflow_id)
+        self.dbos.cancel_workflow(workflow_id)
+        self.send_response(204)
+        self._end_headers()
+
 
 # Be consistent with DBOS-TS response.
 class PerfUtilization(TypedDict):
```
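The three new routes are plain HTTP POST endpoints on the admin server. A minimal sketch of driving them with `requests`, assuming the admin server is running on its default port 3001 (the same default the new CLI commands later in this diff use) and using a placeholder workflow ID:

```python
import requests

ADMIN = "http://localhost:3001"
workflow_id = "example-workflow-id"  # placeholder

# Each handler replies 204 No Content on success; unknown paths get 404.
for action in ("cancel", "resume", "restart"):
    resp = requests.post(f"{ADMIN}/workflows/{workflow_id}/{action}", timeout=5)
    print(action, resp.status_code)
```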
dbos/_core.py
CHANGED
```diff
@@ -266,7 +266,9 @@ def _execute_workflow_wthread(
         raise
 
 
-def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]":
+def execute_workflow_by_id(
+    dbos: "DBOS", workflow_id: str, startNew: bool = False
+) -> "WorkflowHandle[Any]":
     status = dbos._sys_db.get_workflow_status(workflow_id)
     if not status:
         raise DBOSRecoveryError(workflow_id, "Workflow status not found")
@@ -293,7 +295,8 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]":
                 workflow_id,
                 f"Cannot execute workflow because instance '{iname}' is not registered",
             )
-
+
+        if startNew:
             return start_workflow(
                 dbos,
                 wf_func,
@@ -303,6 +306,17 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]":
                 *inputs["args"],
                 **inputs["kwargs"],
             )
+        else:
+            with SetWorkflowID(workflow_id):
+                return start_workflow(
+                    dbos,
+                    wf_func,
+                    status["queue_name"],
+                    True,
+                    dbos._registry.instance_info_map[iname],
+                    *inputs["args"],
+                    **inputs["kwargs"],
+                )
     elif status["class_name"] is not None:
         class_name = status["class_name"]
         if class_name not in dbos._registry.class_info_map:
@@ -310,7 +324,8 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]":
                 workflow_id,
                 f"Cannot execute workflow because class '{class_name}' is not registered",
             )
-
+
+        if startNew:
             return start_workflow(
                 dbos,
                 wf_func,
@@ -320,8 +335,19 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]":
                 *inputs["args"],
                 **inputs["kwargs"],
             )
+        else:
+            with SetWorkflowID(workflow_id):
+                return start_workflow(
+                    dbos,
+                    wf_func,
+                    status["queue_name"],
+                    True,
+                    dbos._registry.class_info_map[class_name],
+                    *inputs["args"],
+                    **inputs["kwargs"],
+                )
     else:
-
+        if startNew:
             return start_workflow(
                 dbos,
                 wf_func,
@@ -330,6 +356,16 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]":
                 *inputs["args"],
                 **inputs["kwargs"],
             )
+        else:
+            with SetWorkflowID(workflow_id):
+                return start_workflow(
+                    dbos,
+                    wf_func,
+                    status["queue_name"],
+                    True,
+                    *inputs["args"],
+                    **inputs["kwargs"],
+                )
 
 
 @overload
@@ -452,6 +488,22 @@ def start_workflow(
         return WorkflowHandleFuture(new_wf_id, future, dbos)
 
 
+if sys.version_info < (3, 12):
+
+    def _mark_coroutine(func: Callable[P, R]) -> Callable[P, R]:
+        @wraps(func)
+        async def async_wrapper(*args: Any, **kwargs: Any) -> R:
+            return await func(*args, **kwargs)  # type: ignore
+
+        return async_wrapper  # type: ignore
+
+else:
+
+    def _mark_coroutine(func: Callable[P, R]) -> Callable[P, R]:
+        inspect.markcoroutinefunction(func)
+        return func
+
+
 def workflow_wrapper(
     dbosreg: "DBOSRegistry",
     func: Callable[P, R],
@@ -512,7 +564,7 @@ def workflow_wrapper(
         )
         return outcome()  # type: ignore
 
-    return wrapper
+    return _mark_coroutine(wrapper) if inspect.iscoroutinefunction(func) else wrapper
 
 
 def decorate_workflow(
@@ -802,6 +854,10 @@ def decorate_step(
             assert tempwf
             return tempwf(*args, **kwargs)
 
+        wrapper = (
+            _mark_coroutine(wrapper) if inspect.iscoroutinefunction(func) else wrapper  # type: ignore
+        )
+
         def temp_wf_sync(*args: Any, **kwargs: Any) -> Any:
             return wrapper(*args, **kwargs)
 
```
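The `_mark_coroutine` shim exists because wrapping an `async def` in a plain synchronous wrapper hides it from `inspect.iscoroutinefunction()`. Python 3.12 added `inspect.markcoroutinefunction()` to restore detection in place; on older versions the shim re-wraps in an `async def` instead. A standalone sketch of the problem and the 3.12 fix (names are illustrative):

```python
import asyncio
import inspect
import sys
from functools import wraps

async def step() -> str:
    return "done"

def passthrough(func):
    # A synchronous wrapper: iscoroutinefunction() now reports False,
    # even though calling it still returns an awaitable.
    @wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper

wrapped = passthrough(step)
print(inspect.iscoroutinefunction(wrapped))  # False: the wrapper hides it

if sys.version_info >= (3, 12):
    inspect.markcoroutinefunction(wrapped)  # restores detection in place
    print(inspect.iscoroutinefunction(wrapped))  # True

print(asyncio.run(wrapped()))  # "done" either way
```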
dbos/_dbos.py
CHANGED
```diff
@@ -56,6 +56,7 @@ from ._registrations import (
 )
 from ._roles import default_required_roles, required_roles
 from ._scheduler import ScheduledWorkflow, scheduled
+from ._sys_db import WorkflowStatusString, reset_system_database
 from ._tracer import dbos_tracer
 
 if TYPE_CHECKING:
@@ -231,6 +232,7 @@ class DBOS:
                     f"DBOS configured multiple times with conflicting information"
                 )
             config = _dbos_global_registry.config
+
         _dbos_global_instance = super().__new__(cls)
         _dbos_global_instance.__init__(fastapi=fastapi, config=config, flask=flask)  # type: ignore
     else:
@@ -243,7 +245,7 @@ class DBOS:
         return _dbos_global_instance
 
     @classmethod
-    def destroy(cls, *, destroy_registry: bool =
+    def destroy(cls, *, destroy_registry: bool = False) -> None:
         global _dbos_global_instance
         if _dbos_global_instance is not None:
             _dbos_global_instance._destroy()
@@ -407,6 +409,22 @@ class DBOS:
             dbos_logger.error(f"DBOS failed to launch: {traceback.format_exc()}")
             raise
 
+    @classmethod
+    def reset_system_database(cls) -> None:
+        """
+        Destroy the DBOS system database. Useful for resetting the state of DBOS between tests.
+        This is a destructive operation and should only be used in a test environment.
+        More information on testing DBOS apps: https://docs.dbos.dev/python/tutorials/testing
+        """
+        if _dbos_global_instance is not None:
+            _dbos_global_instance._reset_system_database()
+
+    def _reset_system_database(self) -> None:
+        assert (
+            not self._launched
+        ), "The system database cannot be reset after DBOS is launched. Resetting the system database is a destructive operation that should only be used in a test environment."
+        reset_system_database(self.config)
+
     def _destroy(self) -> None:
         self._initialized = False
         for event in self.stop_events:
@@ -767,6 +785,11 @@ class DBOS:
         """Execute a workflow by ID (for recovery)."""
         return execute_workflow_by_id(_get_dbos_instance(), workflow_id)
 
+    @classmethod
+    def restart_workflow(cls, workflow_id: str) -> None:
+        """Execute a workflow by ID (for recovery)."""
+        execute_workflow_by_id(_get_dbos_instance(), workflow_id, True)
+
     @classmethod
     def recover_pending_workflows(
         cls, executor_ids: List[str] = ["local"]
@@ -774,6 +797,18 @@ class DBOS:
         """Find all PENDING workflows and execute them."""
         return recover_pending_workflows(_get_dbos_instance(), executor_ids)
 
+    @classmethod
+    def cancel_workflow(cls, workflow_id: str) -> None:
+        """Cancel a workflow by ID."""
+        _get_dbos_instance()._sys_db.set_workflow_status(
+            workflow_id, WorkflowStatusString.CANCELLED, False
+        )
+
+    @classmethod
+    def resume_workflow(cls, workflow_id: str) -> None:
+        """Resume a workflow by ID."""
+        execute_workflow_by_id(_get_dbos_instance(), workflow_id, False)
+
     @classproperty
     def logger(cls) -> Logger:
         """Return the DBOS `Logger` for the current context."""
```
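`DBOS.reset_system_database()` pairs naturally with `destroy()` in a test fixture. A sketch in the spirit of the testing tutorial linked in the docstring (the fixture shape is an assumption, not part of this package; `load_config` is the same helper the CLI imports from the `dbos` package root):

```python
import pytest
from dbos import DBOS, load_config

@pytest.fixture()
def dbos():
    DBOS.destroy()
    dbos = DBOS(config=load_config())
    DBOS.reset_system_database()  # must run before launch(); see the assert above
    DBOS.launch()
    yield dbos
    DBOS.destroy()
```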
dbos/_fastapi.py
CHANGED
```diff
@@ -1,10 +1,10 @@
 import uuid
-from typing import Any, Callable, cast
+from typing import Any, Callable, MutableMapping, cast
 
 from fastapi import FastAPI
 from fastapi import Request as FastAPIRequest
 from fastapi.responses import JSONResponse
-from starlette.types import ASGIApp,
+from starlette.types import ASGIApp, Receive, Scope, Send
 
 from . import DBOS
 from ._context import (
@@ -61,15 +61,16 @@ class LifespanMiddleware:
 
     async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
         if scope["type"] == "lifespan":
-
-
-            if message["type"] == "lifespan.startup":
+
+            async def wrapped_send(message: MutableMapping[str, Any]) -> None:
+                if message["type"] == "lifespan.startup.complete":
                     self.dbos._launch()
-
-            elif message["type"] == "lifespan.shutdown":
+                elif message["type"] == "lifespan.shutdown.complete":
                     self.dbos._destroy()
-
-
+                await send(message)
+
+            # Call the original app with our wrapped functions
+            await self.app(scope, receive, wrapped_send)
         else:
             await self.app(scope, receive, send)
 
@@ -94,7 +95,11 @@ def setup_fastapi_middleware(app: FastAPI, dbos: DBOS) -> None:
         with EnterDBOSHandler(attributes):
             ctx = assert_current_dbos_context()
             ctx.request = _make_request(request)
-            workflow_id = request.headers.get("dbos-idempotency-key"
-
+            workflow_id = request.headers.get("dbos-idempotency-key")
+            if workflow_id is not None:
+                # Set the workflow ID for the handler
+                with SetWorkflowID(workflow_id):
+                    response = await call_next(request)
+            else:
                 response = await call_next(request)
     return response
```
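With this change, a client can pin a request's workflow ID through the `dbos-idempotency-key` header, so retries of the same request reuse the same workflow execution. A client-side sketch with a placeholder URL and key:

```python
import requests

resp = requests.post(
    "http://localhost:8000/checkout",                 # placeholder endpoint
    headers={"dbos-idempotency-key": "order-12345"},  # placeholder key
)
print(resp.status_code)
```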
dbos/_flask.py
CHANGED
```diff
@@ -34,8 +34,12 @@ class FlaskMiddleware:
         with EnterDBOSHandler(attributes):
             ctx = assert_current_dbos_context()
             ctx.request = _make_request(request)
-            workflow_id = request.headers.get("dbos-idempotency-key"
-
+            workflow_id = request.headers.get("dbos-idempotency-key")
+            if workflow_id is not None:
+                # Set the workflow ID for the handler
+                with SetWorkflowID(workflow_id):
+                    response = self.app(environ, start_response)
+            else:
                 response = self.app(environ, start_response)
         return response
 
```
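For context, the Flask integration is wired by passing the app to the `DBOS` constructor (the same `flask=` keyword visible in the `_dbos.py` hunk above). A wiring sketch; the route and workflow names are illustrative:

```python
from flask import Flask
from dbos import DBOS

app = Flask(__name__)
DBOS(flask=app)  # installs the middleware around the WSGI app

@DBOS.workflow()
def greeting_workflow() -> str:
    return "Greetings!"

@app.route("/greeting")
def greeting() -> str:
    # Repeated requests with the same dbos-idempotency-key header reuse
    # the same workflow ID, so the workflow executes only once per key.
    return greeting_workflow()

DBOS.launch()
```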
dbos/_sys_db.py
CHANGED
```diff
@@ -367,7 +367,7 @@ class SystemDatabase:
         with self.engine.begin() as c:
             stmt = (
                 sa.update(SystemSchema.
-                .where(SystemSchema.
+                .where(SystemSchema.workflow_status.c.workflow_uuid == workflow_uuid)
                 .values(
                     status=status,
                 )
@@ -1265,3 +1265,46 @@ class SystemDatabase:
             .where(SystemSchema.workflow_queue.c.workflow_uuid == workflow_id)
             .values(completed_at_epoch_ms=int(time.time() * 1000))
         )
+
+
+def reset_system_database(config: ConfigFile) -> None:
+    sysdb_name = (
+        config["database"]["sys_db_name"]
+        if "sys_db_name" in config["database"] and config["database"]["sys_db_name"]
+        else config["database"]["app_db_name"] + SystemSchema.sysdb_suffix
+    )
+    postgres_db_url = sa.URL.create(
+        "postgresql+psycopg",
+        username=config["database"]["username"],
+        password=config["database"]["password"],
+        host=config["database"]["hostname"],
+        port=config["database"]["port"],
+        database="postgres",
+    )
+    try:
+        # Connect to postgres default database
+        engine = sa.create_engine(postgres_db_url)
+
+        with engine.connect() as conn:
+            # Set autocommit required for database dropping
+            conn.execution_options(isolation_level="AUTOCOMMIT")
+
+            # Terminate existing connections
+            conn.execute(
+                sa.text(
+                    """
+                SELECT pg_terminate_backend(pg_stat_activity.pid)
+                FROM pg_stat_activity
+                WHERE pg_stat_activity.datname = :db_name
+                  AND pid <> pg_backend_pid()
+                """
+                ),
+                {"db_name": sysdb_name},
+            )
+
+            # Drop the database
+            conn.execute(sa.text(f"DROP DATABASE IF EXISTS {sysdb_name}"))
+
+    except sa.exc.SQLAlchemyError as e:
+        dbos_logger.error(f"Error resetting system database: {str(e)}")
+        raise e
```
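The helper can also be called directly from its (private) module, as the CLI's `reset` command now does. A sketch with placeholder connection details for a local Postgres; the config keys match the fields the function reads:

```python
from dbos._sys_db import reset_system_database

config = {
    "database": {
        "hostname": "localhost",  # placeholders for a local Postgres
        "port": 5432,
        "username": "postgres",
        "password": "dbos",
        "app_db_name": "my_app",  # system DB name defaults to this plus a suffix
    }
}
reset_system_database(config)  # terminates connections, then drops the system DB
```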
dbos/_workflow_commands.py
CHANGED
```diff
@@ -1,3 +1,6 @@
+import importlib
+import os
+import sys
 from typing import Any, List, Optional, cast
 
 import typer
@@ -6,6 +9,7 @@ from rich import print
 from dbos import DBOS
 
 from . import _serialization, load_config
+from ._core import execute_workflow_by_id
 from ._dbos_config import ConfigFile, _is_valid_app_name
 from ._sys_db import (
     GetWorkflowsInput,
@@ -123,11 +127,6 @@ def _cancel_workflow(config: ConfigFile, uuid: str) -> None:
     sys_db.destroy()
 
 
-def _reattempt_workflow(uuid: str, startNewWorkflow: bool) -> None:
-    print(f"Reattempt workflow info for {uuid} not implemented")
-    return
-
-
 def _get_workflow_info(
     sys_db: SystemDatabase, workflowUUID: str, getRequest: bool
 ) -> Optional[WorkflowInformation]:
```
dbos/cli/_github_init.py
ADDED
```diff
@@ -0,0 +1,107 @@
+import os
+from base64 import b64decode
+from typing import List, TypedDict
+
+import requests
+
+DEMO_REPO_API = "https://api.github.com/repos/dbos-inc/dbos-demo-apps"
+PY_DEMO_PATH = "python/"
+BRANCH = "main"
+
+
+class GitHubTreeItem(TypedDict):
+    path: str
+    mode: str
+    type: str
+    sha: str
+    url: str
+    size: int
+
+
+class GitHubTree(TypedDict):
+    sha: str
+    url: str
+    tree: List[GitHubTreeItem]
+    truncated: bool
+
+
+class GitHubItem(TypedDict):
+    sha: str
+    node_id: str
+    url: str
+    content: str
+    encoding: str
+    size: int
+
+
+def _fetch_github(url: str) -> requests.Response:
+    headers = {}
+    github_token = os.getenv("GITHUB_TOKEN")
+    if github_token:
+        headers["Authorization"] = f"Bearer {github_token}"
+
+    response = requests.get(url, headers=headers)
+
+    if not response.ok:
+        if response.headers.get("x-ratelimit-remaining") == "0":
+            raise Exception(
+                "Error fetching from GitHub API: rate limit exceeded.\n"
+                "Please wait a few minutes and try again.\n"
+                "To increase the limit, you can create a personal access token and set it in the GITHUB_TOKEN environment variable.\n"
+                "Details: https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api"
+            )
+        elif response.status_code == 401:
+            raise Exception(
+                f"Error fetching content from GitHub {url}: {response.status_code} {response.reason}.\n"
+                "Please ensure your GITHUB_TOKEN environment variable is set to a valid personal access token."
+            )
+        raise Exception(
+            f"Error fetching content from GitHub {url}: {response.status_code} {response.reason}"
+        )
+
+    return response
+
+
+def _fetch_github_tree(tag: str) -> List[GitHubTreeItem]:
+    response = _fetch_github(f"{DEMO_REPO_API}/git/trees/{tag}?recursive=1")
+    tree_data: GitHubTree = response.json()
+    return tree_data["tree"]
+
+
+def _fetch_github_item(url: str) -> str:
+    response = _fetch_github(url)
+    item: GitHubItem = response.json()
+    return b64decode(item["content"]).decode("utf-8")
+
+
+def create_template_from_github(app_name: str, template_name: str) -> None:
+    print(
+        f"Creating a new application named {app_name} from the template {template_name}"
+    )
+
+    tree = _fetch_github_tree(BRANCH)
+    template_path = f"{PY_DEMO_PATH}{template_name}/"
+
+    files_to_download = [
+        item
+        for item in tree
+        if item["path"].startswith(template_path) and item["type"] == "blob"
+    ]
+
+    # Download every file from the template
+    for item in files_to_download:
+        raw_content = _fetch_github_item(item["url"])
+        file_path = item["path"].replace(template_path, "")
+        target_path = os.path.join(".", file_path)
+
+        # Create directory if it doesn't exist
+        os.makedirs(os.path.dirname(target_path), exist_ok=True)
+
+        # Write file with proper permissions
+        with open(target_path, "w", encoding="utf-8") as f:
+            f.write(raw_content)
+        os.chmod(target_path, int(item["mode"], 8))
+
+    print(
+        f"Downloaded {len(files_to_download)} files from the template GitHub repository"
+    )
```
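A sketch of invoking the downloader directly; normally `dbos init` drives it. It needs network access to the GitHub API (set `GITHUB_TOKEN` to raise the rate limit), and it writes files relative to the current working directory. `dbos-toolbox` is one of the GitHub-hosted templates listed in the CLI diff below:

```python
import os
import tempfile

from dbos.cli._github_init import create_template_from_github

os.chdir(tempfile.mkdtemp())  # keep the download out of the real project
create_template_from_github(app_name="demo", template_name="dbos-toolbox")
```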
dbos/cli/_template_init.py
ADDED

```diff
@@ -0,0 +1,98 @@
+import os
+import shutil
+import typing
+from os import path
+from typing import Any
+
+import tomlkit
+from rich import print
+
+
+def get_templates_directory() -> str:
+    import dbos
+
+    package_dir = path.abspath(path.dirname(dbos.__file__))
+    return path.join(package_dir, "_templates")
+
+
+def _copy_dbos_template(src: str, dst: str, ctx: dict[str, str]) -> None:
+    with open(src, "r") as f:
+        content = f.read()
+
+    for key, value in ctx.items():
+        content = content.replace(f"${{{key}}}", value)
+
+    with open(dst, "w") as f:
+        f.write(content)
+
+
+def _copy_template_dir(src_dir: str, dst_dir: str, ctx: dict[str, str]) -> None:
+
+    for root, dirs, files in os.walk(src_dir, topdown=True):
+        dirs[:] = [d for d in dirs if d != "__package"]
+
+        dst_root = path.join(dst_dir, path.relpath(root, src_dir))
+        if len(dirs) == 0:
+            os.makedirs(dst_root, exist_ok=True)
+        else:
+            for dir in dirs:
+                os.makedirs(path.join(dst_root, dir), exist_ok=True)
+
+        for file in files:
+            src = path.join(root, file)
+            base, ext = path.splitext(file)
+
+            dst = path.join(dst_root, base if ext == ".dbos" else file)
+            if path.exists(dst):
+                print(f"[yellow]File {dst} already exists, skipping[/yellow]")
+                continue
+
+            if ext == ".dbos":
+                _copy_dbos_template(src, dst, ctx)
+            else:
+                shutil.copy(src, dst)
+
+
+def copy_template(src_dir: str, project_name: str, config_mode: bool) -> None:
+
+    dst_dir = path.abspath(".")
+
+    package_name = project_name.replace("-", "_")
+    ctx = {
+        "project_name": project_name,
+        "package_name": package_name,
+        "migration_command": "alembic upgrade head",
+    }
+
+    if config_mode:
+        ctx["package_name"] = "."
+        ctx["migration_command"] = "echo 'No migrations specified'"
+        _copy_dbos_template(
+            os.path.join(src_dir, "dbos-config.yaml.dbos"),
+            os.path.join(dst_dir, "dbos-config.yaml"),
+            ctx,
+        )
+    else:
+        _copy_template_dir(src_dir, dst_dir, ctx)
+        _copy_template_dir(
+            path.join(src_dir, "__package"), path.join(dst_dir, package_name), ctx
+        )
+
+
+def get_project_name() -> typing.Union[str, None]:
+    name = None
+    try:
+        with open("pyproject.toml", "rb") as file:
+            pyproj = typing.cast(dict[str, Any], tomlkit.load(file))
+            name = typing.cast(str, pyproj["project"]["name"])
+    except:
+        pass
+
+    if name == None:
+        try:
+            _, parent = path.split(path.abspath("."))
+            name = parent
+        except:
+            pass
+
+    return name
```
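The `.dbos` template files carry `${...}` placeholders that `_copy_dbos_template` expands. Here is that substitution loop run on an inline string instead of a file:

```python
ctx = {"project_name": "my-app", "package_name": "my_app"}
content = "name: ${project_name}\npackage: ${package_name}"
for key, value in ctx.items():
    content = content.replace(f"${{{key}}}", value)  # f"${{{key}}}" renders as "${key}"
print(content)
# name: my-app
# package: my_app
```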
dbos/{cli.py → cli/cli.py}
RENAMED
```diff
@@ -1,6 +1,5 @@
 import os
 import platform
-import shutil
 import signal
 import subprocess
 import time
@@ -9,25 +8,20 @@ from os import path
 from typing import Any
 
 import jsonpickle  # type: ignore
+import requests
 import sqlalchemy as sa
-import tomlkit
 import typer
 from rich import print
-from rich.prompt import Prompt
+from rich.prompt import IntPrompt
 from typing_extensions import Annotated
 
-from
-
-from
-from
-from
-from .
-from .
-    _cancel_workflow,
-    _get_workflow,
-    _list_workflows,
-    _reattempt_workflow,
-)
+from .. import load_config
+from .._app_db import ApplicationDatabase
+from .._dbos_config import _is_valid_app_name
+from .._sys_db import SystemDatabase, reset_system_database
+from .._workflow_commands import _cancel_workflow, _get_workflow, _list_workflows
+from ..cli._github_init import create_template_from_github
+from ._template_init import copy_template, get_project_name, get_templates_directory
 
 app = typer.Typer()
 workflow = typer.Typer()
@@ -90,96 +84,6 @@ def start() -> None:
     process.wait()
 
 
-def _get_templates_directory() -> str:
-    import dbos
-
-    package_dir = path.abspath(path.dirname(dbos.__file__))
-    return path.join(package_dir, "_templates")
-
-
-def _copy_dbos_template(src: str, dst: str, ctx: dict[str, str]) -> None:
-    with open(src, "r") as f:
-        content = f.read()
-
-    for key, value in ctx.items():
-        content = content.replace(f"${{{key}}}", value)
-
-    with open(dst, "w") as f:
-        f.write(content)
-
-
-def _copy_template_dir(src_dir: str, dst_dir: str, ctx: dict[str, str]) -> None:
-
-    for root, dirs, files in os.walk(src_dir, topdown=True):
-        dirs[:] = [d for d in dirs if d != "__package"]
-
-        dst_root = path.join(dst_dir, path.relpath(root, src_dir))
-        if len(dirs) == 0:
-            os.makedirs(dst_root, exist_ok=True)
-        else:
-            for dir in dirs:
-                os.makedirs(path.join(dst_root, dir), exist_ok=True)
-
-        for file in files:
-            src = path.join(root, file)
-            base, ext = path.splitext(file)
-
-            dst = path.join(dst_root, base if ext == ".dbos" else file)
-            if path.exists(dst):
-                print(f"[yellow]File {dst} already exists, skipping[/yellow]")
-                continue
-
-            if ext == ".dbos":
-                _copy_dbos_template(src, dst, ctx)
-            else:
-                shutil.copy(src, dst)
-
-
-def _copy_template(src_dir: str, project_name: str, config_mode: bool) -> None:
-
-    dst_dir = path.abspath(".")
-
-    package_name = project_name.replace("-", "_")
-    ctx = {
-        "project_name": project_name,
-        "package_name": package_name,
-        "migration_command": "alembic upgrade head",
-    }
-
-    if config_mode:
-        ctx["package_name"] = "."
-        ctx["migration_command"] = "echo 'No migrations specified'"
-        _copy_dbos_template(
-            os.path.join(src_dir, "dbos-config.yaml.dbos"),
-            os.path.join(dst_dir, "dbos-config.yaml"),
-            ctx,
-        )
-    else:
-        _copy_template_dir(src_dir, dst_dir, ctx)
-        _copy_template_dir(
-            path.join(src_dir, "__package"), path.join(dst_dir, package_name), ctx
-        )
-
-
-def _get_project_name() -> typing.Union[str, None]:
-    name = None
-    try:
-        with open("pyproject.toml", "rb") as file:
-            pyproj = typing.cast(dict[str, Any], tomlkit.load(file))
-            name = typing.cast(str, pyproj["project"]["name"])
-    except:
-        pass
-
-    if name == None:
-        try:
-            _, parent = path.split(path.abspath("."))
-            name = parent
-        except:
-            pass
-
-    return name
-
-
 @app.command(help="Initialize a new DBOS application from a template")
 def init(
     project_name: Annotated[
@@ -195,35 +99,62 @@ def init(
     ] = False,
 ) -> None:
     try:
-
-
-
-
+
+        git_templates = ["dbos-toolbox", "dbos-app-starter", "dbos-cron-starter"]
+        templates_dir = get_templates_directory()
+        templates = git_templates + [
+            x.name for x in os.scandir(templates_dir) if x.is_dir()
+        ]
+
+        if config and template is None:
+            template = templates[-1]
+
+        if template:
+            if template not in templates:
+                raise Exception(f"Template {template} not found in {templates_dir}")
+        else:
+            print("\n[bold]Available templates:[/bold]")
+            for idx, template_name in enumerate(templates, 1):
+                print(f"  {idx}. {template_name}")
+            while True:
+                try:
+                    choice = IntPrompt.ask(
+                        "\nSelect template number",
+                        show_choices=False,
+                        show_default=False,
+                    )
+                    if 1 <= choice <= len(templates):
+                        template = templates[choice - 1]
+                        break
+                    else:
+                        print(
+                            "[red]Invalid selection. Please choose a number from the list.[/red]"
+                        )
+                except (KeyboardInterrupt, EOFError):
+                    raise typer.Abort()
+                except ValueError:
+                    print("[red]Please enter a valid number.[/red]")
+
+        if template in git_templates:
+            project_name = template
+        else:
+            if project_name is None:
+                project_name = typing.cast(
+                    str,
+                    typer.prompt("What is your project's name?", get_project_name()),
+                )
 
         if not _is_valid_app_name(project_name):
             raise Exception(
                 f"{project_name} is an invalid DBOS app name. App names must be between 3 and 30 characters long and contain only lowercase letters, numbers, dashes, and underscores."
            )
 
-
-
-        if len(templates) == 0:
-            raise Exception(f"no DBOS templates found in {templates_dir} ")
-
-        if template == None:
-            if len(templates) == 1:
-                template = templates[0]
-            else:
-                template = Prompt.ask(
-                    "Which project template do you want to use?", choices=templates
-                )
+        if template in git_templates:
+            create_template_from_github(app_name=project_name, template_name=template)
         else:
-
-
-
-            _copy_template(
-                path.join(templates_dir, template), project_name, config_mode=config
-            )
+            copy_template(
+                path.join(templates_dir, template), project_name, config_mode=config
+            )
     except Exception as e:
         print(f"[red]{e}[/red]")
 
@@ -292,56 +223,12 @@ def reset(
         typer.echo("Operation cancelled.")
         raise typer.Exit()
     config = load_config()
-    sysdb_name = (
-        config["database"]["sys_db_name"]
-        if "sys_db_name" in config["database"] and config["database"]["sys_db_name"]
-        else config["database"]["app_db_name"] + SystemSchema.sysdb_suffix
-    )
-    postgres_db_url = sa.URL.create(
-        "postgresql+psycopg",
-        username=config["database"]["username"],
-        password=config["database"]["password"],
-        host=config["database"]["hostname"],
-        port=config["database"]["port"],
-        database="postgres",
-    )
     try:
-
-        engine = sa.create_engine(postgres_db_url)
-
-        with engine.connect() as conn:
-            # Set autocommit required for database dropping
-            conn.execution_options(isolation_level="AUTOCOMMIT")
-
-            # Terminate existing connections
-            conn.execute(
-                sa.text(
-                    """
-                SELECT pg_terminate_backend(pg_stat_activity.pid)
-                FROM pg_stat_activity
-                WHERE pg_stat_activity.datname = :db_name
-                  AND pid <> pg_backend_pid()
-                """
-                ),
-                {"db_name": sysdb_name},
-            )
-
-            # Drop the database
-            conn.execute(sa.text(f"DROP DATABASE IF EXISTS {sysdb_name}"))
-
+        reset_system_database(config)
     except sa.exc.SQLAlchemyError as e:
-        typer.echo(f"Error
+        typer.echo(f"Error resetting system database: {str(e)}")
         return
 
-    sys_db = None
-    try:
-        sys_db = SystemDatabase(config)
-    except Exception as e:
-        typer.echo(f"DBOS system schema migration failed: {e}")
-    finally:
-        if sys_db:
-            sys_db.destroy()
-
 
 @workflow.command(help="List workflows for your application")
 def list(
@@ -432,5 +319,49 @@ def cancel(
     print(f"Workflow {uuid} has been cancelled")
 
 
+@workflow.command(help="Resume a workflow that has been cancelled")
+def resume(
+    uuid: Annotated[str, typer.Argument()],
+    host: Annotated[
+        typing.Optional[str],
+        typer.Option("--host", "-h", help="Specify the admin host"),
+    ] = "localhost",
+    port: Annotated[
+        typing.Optional[int],
+        typer.Option("--port", "-p", help="Specify the admin port"),
+    ] = 3001,
+) -> None:
+    response = requests.post(
+        f"http://{host}:{port}/workflows/{uuid}/resume", json=[], timeout=5
+    )
+
+    if response.status_code == 200:
+        print(f"Workflow {uuid} has been resumed")
+    else:
+        print(f"Failed to resume workflow {uuid}. Status code: {response.status_code}")
+
+
+@workflow.command(help="Restart a workflow from the beginning with a new id")
+def restart(
+    uuid: Annotated[str, typer.Argument()],
+    host: Annotated[
+        typing.Optional[str],
+        typer.Option("--host", "-h", help="Specify the admin host"),
+    ] = "localhost",
+    port: Annotated[
+        typing.Optional[int],
+        typer.Option("--port", "-p", help="Specify the admin port"),
+    ] = 3001,
+) -> None:
+    response = requests.post(
+        f"http://{host}:{port}/workflows/{uuid}/restart", json=[], timeout=5
+    )
+
+    if response.status_code == 200:
+        print(f"Workflow {uuid} has been restarted")
+    else:
+        print(f"Failed to resume workflow {uuid}. Status code: {response.status_code}")
+
+
 if __name__ == "__main__":
     app()
```
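A sketch of exercising the new subcommands with Typer's test runner, assuming the `workflow` sub-app is mounted under that name (as the `dbos workflow ...` commands suggest) and the target app's admin server is reachable on localhost:3001:

```python
from typer.testing import CliRunner

from dbos.cli.cli import app

runner = CliRunner()
result = runner.invoke(app, ["workflow", "resume", "example-workflow-id"])
print(result.output)  # prints the command's success or failure message
```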
{dbos-0.19.0a9.dist-info → dbos-0.20.0.dist-info}/RECORD
CHANGED

```diff
@@ -1,23 +1,23 @@
-dbos-0.
-dbos-0.
-dbos-0.
-dbos-0.
+dbos-0.20.0.dist-info/METADATA,sha256=4gV-eeocBCalrCoYpN8ryaGJNHvu4clGO7mZfGD-Cco,5307
+dbos-0.20.0.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+dbos-0.20.0.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-0.20.0.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=CxRHBHEthPL4PZoLbZhp3rdm44-KkRTT2-7DkK9d4QQ,724
-dbos/_admin_server.py,sha256=
+dbos/_admin_server.py,sha256=PJgneZG9-64TapZrPeJtt73puAswRImCE5uce2k2PKU,4750
 dbos/_app_db.py,sha256=_tv2vmPjjiaikwgxH3mqxgJ4nUUcG2-0uMXKWCqVu1c,5509
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
 dbos/_cloudutils/authentication.py,sha256=V0fCWQN9stCkhbuuxgPTGpvuQcDqfU3KAxPAh01vKW4,5007
 dbos/_cloudutils/cloudutils.py,sha256=5e3CW1deSW-dI5G3QN0XbiVsBhyqT8wu7fuV2f8wtGU,7688
 dbos/_cloudutils/databases.py,sha256=x4187Djsyoa-QaG3Kog8JT2_GERsnqa93LIVanmVUmg,8393
 dbos/_context.py,sha256=RH08s_nee95vgxdz6AsYuVWF1LuJSVtOyIifblsa4pw,18760
-dbos/_core.py,sha256
+dbos/_core.py,sha256=-2oh2-NicMJBwTwrd2EQBQm4Vu0caozFeoS9Kj47DzM,36588
 dbos/_croniter.py,sha256=hbhgfsHBqclUS8VeLnJ9PSE9Z54z6mi4nnrr1aUXn0k,47561
 dbos/_db_wizard.py,sha256=xgKLna0_6Xi50F3o8msRosXba8NScHlpJR5ICVCkHDQ,7534
-dbos/_dbos.py,sha256=
+dbos/_dbos.py,sha256=1PG142hzPBFguAbuBXaKS-YwzRdaIUW8087JCi78RmU,36193
 dbos/_dbos_config.py,sha256=h_q1gzudhsAMVkGMD0qQ6kLic6YhdJgzm50YFSIx9Bo,8196
 dbos/_error.py,sha256=vtaSsG0QW6cRlwfZ4zzZWy_IHCZlomwSlrDyGWuyn8c,4337
-dbos/_fastapi.py,sha256=
-dbos/_flask.py,sha256=
+dbos/_fastapi.py,sha256=ke03vqsSYDnO6XeOtOVFXj0-f-v1MGsOxa9McaROvNc,3616
+dbos/_flask.py,sha256=DZKUZR5-xOzPI7tYZ53r2PvvHVoAb8SYwLzMVFsVfjI,2608
 dbos/_kafka.py,sha256=o6DbwnsYRDtvVTZVsN7BAK8cdP79AfoWX3Q7CGY2Yuo,4199
 dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
 dbos/_logger.py,sha256=iYwbA7DLyXalWa2Yu07HO6Xm301nRuenMU64GgwUMkU,3576
@@ -41,21 +41,23 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=KeyoPrF7hy_ODXV7QNike_VFSD74QBRfQ76D7QyE9HI,966
 dbos/_schemas/system_database.py,sha256=rwp4EvCSaXcUoMaRczZCvETCxGp72k3-hvLyGUDkih0,5163
 dbos/_serialization.py,sha256=YCYv0qKAwAZ1djZisBC7khvKqG-5OcIv9t9EC5PFIog,1743
-dbos/_sys_db.py,sha256=
-dbos/_templates/
-dbos/_templates/
-dbos/_templates/
-dbos/_templates/
-dbos/_templates/
-dbos/_templates/
-dbos/_templates/
-dbos/_templates/
-dbos/_templates/
-dbos/_templates/
+dbos/_sys_db.py,sha256=eXFXzmw_bq5Qp3s2_OzjkQKQj9HxMbP4AyJ2VQnJ08g,53786
+dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
+dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+dbos/_templates/dbos-db-starter/__package/main.py,sha256=eI0SS9Nwj-fldtiuSzIlIG6dC91GXXwdRsoHxv6S_WI,2719
+dbos/_templates/dbos-db-starter/__package/schema.py,sha256=7Z27JGC8yy7Z44cbVXIREYxtUhU4JVkLCp5Q7UahVQ0,260
+dbos/_templates/dbos-db-starter/alembic.ini,sha256=VKBn4Gy8mMuCdY7Hip1jmo3wEUJ1VG1aW7EqY0_n-as,3695
+dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos,sha256=OMlcpdYUJKjyAme7phOz3pbn9upcIRjm42iwEThWUEQ,495
+dbos/_templates/dbos-db-starter/migrations/env.py.dbos,sha256=GUV6sjkDzf9Vl6wkGEd0RSkK-ftRfV6EUwSQdd0qFXg,2392
+dbos/_templates/dbos-db-starter/migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
+dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sha256=U5thFWGqNN4QLrNXT7wUUqftIFDNE5eSdqD8JNW1mec,942
+dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=rvBY1RQU6DO7rL7EnaJJxGcmd4tP_PpGqUEE6imZnhY,2518
-dbos/_workflow_commands.py,sha256=
-dbos/cli.py,sha256=
+dbos/_workflow_commands.py,sha256=tj-gJARjDJ5aYo0ii2udTAU4l36vbeXwmOYh8Q4y_ac,4625
+dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
+dbos/cli/_template_init.py,sha256=AfuMaO8bmr9WsPNHr6j2cp7kjVVZDUpH7KpbTg0hhFs,2722
+dbos/cli/cli.py,sha256=07TXdfDhImEOjB2-yhWJc1CK07_CSF-xF7TYCtB1kRY,12410
 dbos/dbos-config.schema.json,sha256=X5TpXNcARGceX0zQs0fVgtZW_Xj9uBbY5afPt9Rz9yk,5741
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-0.
+dbos-0.20.0.dist-info/RECORD,,
```
The remaining 12 files listed in the summary above (the hello → dbos-db-starter template files, WHEEL, and LICENSE) were renamed without content changes.