dbos 0.20.0a2__tar.gz → 0.20.0a5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of dbos might be problematic.
- {dbos-0.20.0a2 → dbos-0.20.0a5}/PKG-INFO +1 -1
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_core.py +21 -1
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_fastapi.py +10 -9
- dbos-0.20.0a5/dbos/cli/_github_init.py +107 -0
- dbos-0.20.0a5/dbos/cli/_template_init.py +98 -0
- {dbos-0.20.0a2/dbos → dbos-0.20.0a5/dbos/cli}/cli.py +56 -122
- {dbos-0.20.0a2 → dbos-0.20.0a5}/pyproject.toml +2 -2
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/conftest.py +1 -9
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_fastapi.py +76 -2
- dbos-0.20.0a5/tests/test_package.py +92 -0
- dbos-0.20.0a2/tests/test_package.py +0 -84
- {dbos-0.20.0a2 → dbos-0.20.0a5}/LICENSE +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/README.md +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/__init__.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_admin_server.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_app_db.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_classproperty.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_cloudutils/authentication.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_cloudutils/cloudutils.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_cloudutils/databases.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_context.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_croniter.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_db_wizard.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_dbos.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_dbos_config.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_error.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_flask.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_kafka.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_kafka_message.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_logger.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_migrations/env.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_migrations/script.py.mako +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_outcome.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_queue.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_recovery.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_registrations.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_request.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_roles.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_scheduler.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_schemas/__init__.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_schemas/application_database.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_schemas/system_database.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_serialization.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_sys_db.py +0 -0
- {dbos-0.20.0a2/dbos/_templates/hello → dbos-0.20.0a5/dbos/_templates/dbos-db-starter}/README.md +0 -0
- {dbos-0.20.0a2/dbos/_templates/hello → dbos-0.20.0a5/dbos/_templates/dbos-db-starter}/__package/__init__.py +0 -0
- {dbos-0.20.0a2/dbos/_templates/hello → dbos-0.20.0a5/dbos/_templates/dbos-db-starter}/__package/main.py +0 -0
- {dbos-0.20.0a2/dbos/_templates/hello → dbos-0.20.0a5/dbos/_templates/dbos-db-starter}/__package/schema.py +0 -0
- {dbos-0.20.0a2/dbos/_templates/hello → dbos-0.20.0a5/dbos/_templates/dbos-db-starter}/alembic.ini +0 -0
- {dbos-0.20.0a2/dbos/_templates/hello → dbos-0.20.0a5/dbos/_templates/dbos-db-starter}/dbos-config.yaml.dbos +0 -0
- {dbos-0.20.0a2/dbos/_templates/hello → dbos-0.20.0a5/dbos/_templates/dbos-db-starter}/migrations/env.py.dbos +0 -0
- {dbos-0.20.0a2/dbos/_templates/hello → dbos-0.20.0a5/dbos/_templates/dbos-db-starter}/migrations/script.py.mako +0 -0
- {dbos-0.20.0a2/dbos/_templates/hello → dbos-0.20.0a5/dbos/_templates/dbos-db-starter}/migrations/versions/2024_07_31_180642_init.py +0 -0
- {dbos-0.20.0a2/dbos/_templates/hello → dbos-0.20.0a5/dbos/_templates/dbos-db-starter}/start_postgres_docker.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_tracer.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_workflow_commands.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/dbos-config.schema.json +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/py.typed +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/__init__.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/atexit_no_ctor.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/atexit_no_launch.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/classdefs.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/more_classdefs.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/queuedworkflow.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_admin_server.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_async.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_classdecorators.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_concurrency.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_config.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_croniter.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_dbos.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_failures.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_fastapi_roles.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_flask.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_kafka.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_outcome.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_queue.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_scheduler.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_schema_migration.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_singleton.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_spans.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_workflow_cmds.py +0 -0
- {dbos-0.20.0a2 → dbos-0.20.0a5}/version/__init__.py +0 -0
{dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_core.py

@@ -488,6 +488,22 @@ def start_workflow(
     return WorkflowHandleFuture(new_wf_id, future, dbos)


+if sys.version_info < (3, 12):
+
+    def _mark_coroutine(func: Callable[P, R]) -> Callable[P, R]:
+        @wraps(func)
+        async def async_wrapper(*args: Any, **kwargs: Any) -> R:
+            return await func(*args, **kwargs)  # type: ignore
+
+        return async_wrapper  # type: ignore
+
+else:
+
+    def _mark_coroutine(func: Callable[P, R]) -> Callable[P, R]:
+        inspect.markcoroutinefunction(func)
+        return func
+
+
 def workflow_wrapper(
     dbosreg: "DBOSRegistry",
     func: Callable[P, R],

@@ -548,7 +564,7 @@ def workflow_wrapper(
         )
         return outcome()  # type: ignore

-    return wrapper
+    return _mark_coroutine(wrapper) if inspect.iscoroutinefunction(func) else wrapper


 def decorate_workflow(

@@ -838,6 +854,10 @@ def decorate_step(
         assert tempwf
         return tempwf(*args, **kwargs)

+    wrapper = (
+        _mark_coroutine(wrapper) if inspect.iscoroutinefunction(func) else wrapper  # type: ignore
+    )
+
     def temp_wf_sync(*args: Any, **kwargs: Any) -> Any:
         return wrapper(*args, **kwargs)

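The _mark_coroutine helper exists so that a synchronous wrapper placed around an async workflow or step is still recognized as a coroutine function by callers such as FastAPI, which use inspect.iscoroutinefunction to decide whether to await an endpoint. The standalone sketch below illustrates the same technique outside of dbos; the traced decorator and greet function are hypothetical names used only for this example.

import functools
import inspect
import sys
from typing import Any, Callable


def traced(func: Callable[..., Any]) -> Callable[..., Any]:
    # A synchronous wrapper, like the workflow/step wrappers in _core.py.
    @functools.wraps(func)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        print(f"calling {func.__name__}")
        return func(*args, **kwargs)  # a coroutine object if func is async

    if not inspect.iscoroutinefunction(func):
        return wrapper

    if sys.version_info >= (3, 12):
        # Python 3.12+ can flag an ordinary function as a coroutine function.
        inspect.markcoroutinefunction(wrapper)
        return wrapper

    # Older Pythons: return a genuine coroutine function instead.
    @functools.wraps(func)
    async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
        return await wrapper(*args, **kwargs)

    return async_wrapper


@traced
async def greet(name: str) -> str:
    return f"hello {name}"


# Frameworks that perform this check now see an awaitable endpoint on every
# supported Python version, even though the wrapper itself may be synchronous.
assert inspect.iscoroutinefunction(greet)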
{dbos-0.20.0a2 → dbos-0.20.0a5}/dbos/_fastapi.py

@@ -1,10 +1,10 @@
 import uuid
-from typing import Any, Callable, cast
+from typing import Any, Callable, MutableMapping, cast

 from fastapi import FastAPI
 from fastapi import Request as FastAPIRequest
 from fastapi.responses import JSONResponse
-from starlette.types import ASGIApp,
+from starlette.types import ASGIApp, Receive, Scope, Send

 from . import DBOS
 from ._context import (

@@ -61,15 +61,16 @@ class LifespanMiddleware:

     async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
         if scope["type"] == "lifespan":
-
-
-            if message["type"] == "lifespan.startup":
+
+            async def wrapped_send(message: MutableMapping[str, Any]) -> None:
+                if message["type"] == "lifespan.startup.complete":
                     self.dbos._launch()
-
-            elif message["type"] == "lifespan.shutdown":
+                elif message["type"] == "lifespan.shutdown.complete":
                     self.dbos._destroy()
-
-
+                await send(message)
+
+            # Call the original app with our wrapped functions
+            await self.app(scope, receive, wrapped_send)
         else:
             await self.app(scope, receive, send)

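The reworked middleware now launches and destroys DBOS by intercepting the lifespan.startup.complete and lifespan.shutdown.complete events on the send side, after the wrapped application (including any user-supplied FastAPI lifespan) has finished its own startup or shutdown, instead of consuming the receive messages itself. Below is a minimal, self-contained sketch of that pattern; the class and callback names are illustrative and are not the dbos implementation.

from typing import Any, Awaitable, Callable, MutableMapping

Message = MutableMapping[str, Any]
Scope = MutableMapping[str, Any]
Receive = Callable[[], Awaitable[Message]]
Send = Callable[[Message], Awaitable[None]]
ASGIApp = Callable[[Scope, Receive, Send], Awaitable[None]]


class LifespanHookMiddleware:
    def __init__(
        self,
        app: ASGIApp,
        on_startup: Callable[[], None],
        on_shutdown: Callable[[], None],
    ) -> None:
        self.app = app
        self.on_startup = on_startup
        self.on_shutdown = on_shutdown

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        if scope["type"] != "lifespan":
            await self.app(scope, receive, send)
            return

        async def wrapped_send(message: Message) -> None:
            # The inner app emits *.complete only after its own lifespan has
            # run, so the hooks fire at the right time without short-circuiting
            # the lifespan receive loop.
            if message["type"] == "lifespan.startup.complete":
                self.on_startup()
            elif message["type"] == "lifespan.shutdown.complete":
                self.on_shutdown()
            await send(message)

        await self.app(scope, receive, wrapped_send)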
dbos-0.20.0a5/dbos/cli/_github_init.py (new file, +107 lines)

import os
from base64 import b64decode
from typing import List, TypedDict

import requests

DEMO_REPO_API = "https://api.github.com/repos/dbos-inc/dbos-demo-apps"
PY_DEMO_PATH = "python/"
BRANCH = "main"


class GitHubTreeItem(TypedDict):
    path: str
    mode: str
    type: str
    sha: str
    url: str
    size: int


class GitHubTree(TypedDict):
    sha: str
    url: str
    tree: List[GitHubTreeItem]
    truncated: bool


class GitHubItem(TypedDict):
    sha: str
    node_id: str
    url: str
    content: str
    encoding: str
    size: int


def _fetch_github(url: str) -> requests.Response:
    headers = {}
    github_token = os.getenv("GITHUB_TOKEN")
    if github_token:
        headers["Authorization"] = f"Bearer {github_token}"

    response = requests.get(url, headers=headers)

    if not response.ok:
        if response.headers.get("x-ratelimit-remaining") == "0":
            raise Exception(
                "Error fetching from GitHub API: rate limit exceeded.\n"
                "Please wait a few minutes and try again.\n"
                "To increase the limit, you can create a personal access token and set it in the GITHUB_TOKEN environment variable.\n"
                "Details: https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api"
            )
        elif response.status_code == 401:
            raise Exception(
                f"Error fetching content from GitHub {url}: {response.status_code} {response.reason}.\n"
                "Please ensure your GITHUB_TOKEN environment variable is set to a valid personal access token."
            )
        raise Exception(
            f"Error fetching content from GitHub {url}: {response.status_code} {response.reason}"
        )

    return response


def _fetch_github_tree(tag: str) -> List[GitHubTreeItem]:
    response = _fetch_github(f"{DEMO_REPO_API}/git/trees/{tag}?recursive=1")
    tree_data: GitHubTree = response.json()
    return tree_data["tree"]


def _fetch_github_item(url: str) -> str:
    response = _fetch_github(url)
    item: GitHubItem = response.json()
    return b64decode(item["content"]).decode("utf-8")


def create_template_from_github(app_name: str, template_name: str) -> None:
    print(
        f"Creating a new application named {app_name} from the template {template_name}"
    )

    tree = _fetch_github_tree(BRANCH)
    template_path = f"{PY_DEMO_PATH}{template_name}/"

    files_to_download = [
        item
        for item in tree
        if item["path"].startswith(template_path) and item["type"] == "blob"
    ]

    # Download every file from the template
    for item in files_to_download:
        raw_content = _fetch_github_item(item["url"])
        file_path = item["path"].replace(template_path, "")
        target_path = os.path.join(".", file_path)

        # Create directory if it doesn't exist
        os.makedirs(os.path.dirname(target_path), exist_ok=True)

        # Write file with proper permissions
        with open(target_path, "w", encoding="utf-8") as f:
            f.write(raw_content)
        os.chmod(target_path, int(item["mode"], 8))

    print(
        f"Downloaded {len(files_to_download)} files from the template GitHub repository"
    )

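This helper is driven by the reworked init command further down, but it can also be exercised directly. A hedged usage sketch, run from an empty project directory, using the module path added in this release:

# Downloads python/dbos-app-starter/* from dbos-inc/dbos-demo-apps at the
# "main" branch into the current directory, preserving each blob's file mode.
# Setting GITHUB_TOKEN in the environment raises the unauthenticated rate limit.
from dbos.cli._github_init import create_template_from_github

create_template_from_github(app_name="my-app", template_name="dbos-app-starter")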
dbos-0.20.0a5/dbos/cli/_template_init.py (new file, +98 lines)

import os
import shutil
import typing
from os import path
from typing import Any

import tomlkit
from rich import print


def get_templates_directory() -> str:
    import dbos

    package_dir = path.abspath(path.dirname(dbos.__file__))
    return path.join(package_dir, "_templates")


def _copy_dbos_template(src: str, dst: str, ctx: dict[str, str]) -> None:
    with open(src, "r") as f:
        content = f.read()

    for key, value in ctx.items():
        content = content.replace(f"${{{key}}}", value)

    with open(dst, "w") as f:
        f.write(content)


def _copy_template_dir(src_dir: str, dst_dir: str, ctx: dict[str, str]) -> None:

    for root, dirs, files in os.walk(src_dir, topdown=True):
        dirs[:] = [d for d in dirs if d != "__package"]

        dst_root = path.join(dst_dir, path.relpath(root, src_dir))
        if len(dirs) == 0:
            os.makedirs(dst_root, exist_ok=True)
        else:
            for dir in dirs:
                os.makedirs(path.join(dst_root, dir), exist_ok=True)

        for file in files:
            src = path.join(root, file)
            base, ext = path.splitext(file)

            dst = path.join(dst_root, base if ext == ".dbos" else file)
            if path.exists(dst):
                print(f"[yellow]File {dst} already exists, skipping[/yellow]")
                continue

            if ext == ".dbos":
                _copy_dbos_template(src, dst, ctx)
            else:
                shutil.copy(src, dst)


def copy_template(src_dir: str, project_name: str, config_mode: bool) -> None:

    dst_dir = path.abspath(".")

    package_name = project_name.replace("-", "_")
    ctx = {
        "project_name": project_name,
        "package_name": package_name,
        "migration_command": "alembic upgrade head",
    }

    if config_mode:
        ctx["package_name"] = "."
        ctx["migration_command"] = "echo 'No migrations specified'"
        _copy_dbos_template(
            os.path.join(src_dir, "dbos-config.yaml.dbos"),
            os.path.join(dst_dir, "dbos-config.yaml"),
            ctx,
        )
    else:
        _copy_template_dir(src_dir, dst_dir, ctx)
        _copy_template_dir(
            path.join(src_dir, "__package"), path.join(dst_dir, package_name), ctx
        )


def get_project_name() -> typing.Union[str, None]:
    name = None
    try:
        with open("pyproject.toml", "rb") as file:
            pyproj = typing.cast(dict[str, Any], tomlkit.load(file))
            name = typing.cast(str, pyproj["project"]["name"])
    except:
        pass

    if name == None:
        try:
            _, parent = path.split(path.abspath("."))
            name = parent
        except:
            pass

    return name

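The .dbos template files copied by _copy_dbos_template use ${key} placeholders that are filled from the context built in copy_template. A small illustration of that substitution follows; the YAML-like text is made up for the example and is not the actual dbos-config.yaml.dbos contents.

ctx = {
    "project_name": "my-app",
    "package_name": "my_app",
    "migration_command": "alembic upgrade head",
}

# Hypothetical template text; the real templates live under dbos/_templates/.
template = (
    "name: ${project_name}\n"
    "start: python3 -m ${package_name}.main\n"
    "migrate: ${migration_command}\n"
)

rendered = template
for key, value in ctx.items():
    # f"${{{key}}}" renders as the literal placeholder "${key}".
    rendered = rendered.replace(f"${{{key}}}", value)

print(rendered)
# name: my-app
# start: python3 -m my_app.main
# migrate: alembic upgrade head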
{dbos-0.20.0a2/dbos → dbos-0.20.0a5/dbos/cli}/cli.py

@@ -1,6 +1,5 @@
 import os
 import platform
-import shutil
 import signal
 import subprocess
 import time

@@ -11,19 +10,19 @@ from typing import Any
 import jsonpickle  # type: ignore
 import requests
 import sqlalchemy as sa
-import tomlkit
 import typer
 from rich import print
-from rich.prompt import
+from rich.prompt import IntPrompt
 from typing_extensions import Annotated

-from
-
-from
-from .
-from
-from
-from .
+from .. import load_config
+from .._app_db import ApplicationDatabase
+from .._dbos_config import _is_valid_app_name
+from .._schemas.system_database import SystemSchema
+from .._sys_db import SystemDatabase
+from .._workflow_commands import _cancel_workflow, _get_workflow, _list_workflows
+from ..cli._github_init import create_template_from_github
+from ._template_init import copy_template, get_project_name, get_templates_directory

 app = typer.Typer()
 workflow = typer.Typer()

@@ -86,96 +85,6 @@ def start() -> None:
     process.wait()


-def _get_templates_directory() -> str:
-    import dbos
-
-    package_dir = path.abspath(path.dirname(dbos.__file__))
-    return path.join(package_dir, "_templates")
-
-
-def _copy_dbos_template(src: str, dst: str, ctx: dict[str, str]) -> None:
-    with open(src, "r") as f:
-        content = f.read()
-
-    for key, value in ctx.items():
-        content = content.replace(f"${{{key}}}", value)
-
-    with open(dst, "w") as f:
-        f.write(content)
-
-
-def _copy_template_dir(src_dir: str, dst_dir: str, ctx: dict[str, str]) -> None:
-
-    for root, dirs, files in os.walk(src_dir, topdown=True):
-        dirs[:] = [d for d in dirs if d != "__package"]
-
-        dst_root = path.join(dst_dir, path.relpath(root, src_dir))
-        if len(dirs) == 0:
-            os.makedirs(dst_root, exist_ok=True)
-        else:
-            for dir in dirs:
-                os.makedirs(path.join(dst_root, dir), exist_ok=True)
-
-        for file in files:
-            src = path.join(root, file)
-            base, ext = path.splitext(file)
-
-            dst = path.join(dst_root, base if ext == ".dbos" else file)
-            if path.exists(dst):
-                print(f"[yellow]File {dst} already exists, skipping[/yellow]")
-                continue
-
-            if ext == ".dbos":
-                _copy_dbos_template(src, dst, ctx)
-            else:
-                shutil.copy(src, dst)
-
-
-def _copy_template(src_dir: str, project_name: str, config_mode: bool) -> None:
-
-    dst_dir = path.abspath(".")
-
-    package_name = project_name.replace("-", "_")
-    ctx = {
-        "project_name": project_name,
-        "package_name": package_name,
-        "migration_command": "alembic upgrade head",
-    }
-
-    if config_mode:
-        ctx["package_name"] = "."
-        ctx["migration_command"] = "echo 'No migrations specified'"
-        _copy_dbos_template(
-            os.path.join(src_dir, "dbos-config.yaml.dbos"),
-            os.path.join(dst_dir, "dbos-config.yaml"),
-            ctx,
-        )
-    else:
-        _copy_template_dir(src_dir, dst_dir, ctx)
-        _copy_template_dir(
-            path.join(src_dir, "__package"), path.join(dst_dir, package_name), ctx
-        )
-
-
-def _get_project_name() -> typing.Union[str, None]:
-    name = None
-    try:
-        with open("pyproject.toml", "rb") as file:
-            pyproj = typing.cast(dict[str, Any], tomlkit.load(file))
-            name = typing.cast(str, pyproj["project"]["name"])
-    except:
-        pass
-
-    if name == None:
-        try:
-            _, parent = path.split(path.abspath("."))
-            name = parent
-        except:
-            pass
-
-    return name
-
-
 @app.command(help="Initialize a new DBOS application from a template")
 def init(
     project_name: Annotated[

@@ -191,35 +100,60 @@ def init(
     ] = False,
 ) -> None:
     try:
-
-
-
-        )
+        git_templates = ["dbos-app-starter", "dbos-cron-starter"]
+        templates_dir = get_templates_directory()
+        templates = git_templates + [
+            x.name for x in os.scandir(templates_dir) if x.is_dir()
+        ]
+        if len(templates) == 0:
+            raise Exception(f"no DBOS templates found in {templates_dir} ")
+
+        if template:
+            if template not in templates:
+                raise Exception(f"Template {template} not found in {templates_dir}")
+        else:
+            print("\n[bold]Available templates:[/bold]")
+            for idx, template_name in enumerate(templates, 1):
+                print(f"  {idx}. {template_name}")
+            while True:
+                try:
+                    choice = IntPrompt.ask(
+                        "\nSelect template number",
+                        show_choices=False,
+                        show_default=False,
+                    )
+                    if 1 <= choice <= len(templates):
+                        template = templates[choice - 1]
+                        break
+                    else:
+                        print(
+                            "[red]Invalid selection. Please choose a number from the list.[/red]"
+                        )
+                except (KeyboardInterrupt, EOFError):
+                    raise typer.Abort()
+                except ValueError:
+                    print("[red]Please enter a valid number.[/red]")
+
+        if template in git_templates:
+            project_name = template
+        else:
+            if project_name is None:
+                project_name = typing.cast(
+                    str,
+                    typer.prompt("What is your project's name?", get_project_name()),
+                )

         if not _is_valid_app_name(project_name):
             raise Exception(
                 f"{project_name} is an invalid DBOS app name. App names must be between 3 and 30 characters long and contain only lowercase letters, numbers, dashes, and underscores."
             )

-
-
-        if len(templates) == 0:
-            raise Exception(f"no DBOS templates found in {templates_dir} ")
-
-        if template == None:
-            if len(templates) == 1:
-                template = templates[0]
-            else:
-                template = Prompt.ask(
-                    "Which project template do you want to use?", choices=templates
-                )
+        if template in git_templates:
+            create_template_from_github(app_name=project_name, template_name=template)
         else:
-
-
-
-            _copy_template(
-                path.join(templates_dir, template), project_name, config_mode=config
-            )
+            copy_template(
+                path.join(templates_dir, template), project_name, config_mode=config
+            )
     except Exception as e:
         print(f"[red]{e}[/red]")

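One way to exercise the reworked init flow without a terminal is Typer's test runner. This is a sketch under the assumption that the Typer app imports cleanly from the new module path, and note that both invocations scaffold files into the current working directory.

from typer.testing import CliRunner

from dbos.cli.cli import app  # module path introduced in this release

runner = CliRunner()

# Non-interactive: pick a bundled template explicitly.
result = runner.invoke(app, ["init", "my-app", "--template", "dbos-db-starter"])
print(result.output)

# Interactive: with no --template the command prints the numbered template
# list and reads a selection via IntPrompt; here we feed choice "1"
# (dbos-app-starter), so the project name defaults to the template name.
result = runner.invoke(app, ["init"], input="1\n")
print(result.output)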
{dbos-0.20.0a2 → dbos-0.20.0a5}/pyproject.toml

@@ -27,13 +27,13 @@ dependencies = [
 ]
 requires-python = ">=3.9"
 readme = "README.md"
-version = "0.20.
+version = "0.20.0a5"

 [project.license]
 text = "MIT"

 [project.scripts]
-dbos = "dbos.cli:app"
+dbos = "dbos.cli.cli:app"

 [build-system]
 requires = [

{dbos-0.20.0a2 → dbos-0.20.0a5}/tests/conftest.py

@@ -118,15 +118,7 @@ def dbos_fastapi(
 ) -> Generator[Tuple[DBOS, FastAPI], Any, None]:
     DBOS.destroy()
     app = FastAPI()
-
-    # ignore the on_event deprecation warnings
-    with warnings.catch_warnings():
-        warnings.filterwarnings(
-            "ignore",
-            category=DeprecationWarning,
-            message=r"\s*on_event is deprecated, use lifespan event handlers instead\.",
-        )
-        dbos = DBOS(fastapi=app, config=config)
+    dbos = DBOS(fastapi=app, config=config)

     # This is for test convenience.
     # Usually fastapi itself does launch, but we are not completing the fastapi lifecycle

{dbos-0.20.0a2 → dbos-0.20.0a5}/tests/test_fastapi.py

@@ -1,14 +1,18 @@
+import asyncio
 import logging
 import uuid
-from
+from contextlib import asynccontextmanager
+from typing import Any, Tuple

+import httpx
 import pytest
 import sqlalchemy as sa
+import uvicorn
 from fastapi import FastAPI
 from fastapi.testclient import TestClient

 # Public API
-from dbos import DBOS
+from dbos import DBOS, ConfigFile

 # Private API because this is a unit test
 from dbos._context import assert_current_dbos_context

@@ -157,3 +161,73 @@ def test_endpoint_recovery(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
     workflow_handles = DBOS.recover_pending_workflows()
     assert len(workflow_handles) == 1
     assert workflow_handles[0].get_result() == ("a", wfuuid)
+
+
+@pytest.mark.asyncio
+async def test_custom_lifespan(
+    config: ConfigFile, cleanup_test_databases: None
+) -> None:
+    resource = None
+    port = 8000
+
+    @asynccontextmanager
+    async def lifespan(app: FastAPI) -> Any:
+        nonlocal resource
+        resource = 1
+        yield
+        resource = None
+
+    app = FastAPI(lifespan=lifespan)
+
+    DBOS.destroy()
+    DBOS(fastapi=app, config=config)
+
+    @app.get("/")
+    @DBOS.workflow()
+    async def resource_workflow() -> Any:
+        return {"resource": resource}
+
+    uvicorn_config = uvicorn.Config(
+        app=app, host="127.0.0.1", port=port, log_level="error"
+    )
+    server = uvicorn.Server(config=uvicorn_config)
+
+    # Run server in background task
+    server_task = asyncio.create_task(server.serve())
+    await asyncio.sleep(0.2)  # Give server time to start
+
+    async with httpx.AsyncClient() as client:
+        r = await client.get(f"http://127.0.0.1:{port}")
+        assert r.json()["resource"] == 1
+
+    server.should_exit = True
+    await server_task
+    assert resource is None
+
+
+def test_stacked_decorators_wf(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
+    dbos, app = dbos_fastapi
+    client = TestClient(app)
+
+    @app.get("/endpoint/{var1}/{var2}")
+    @DBOS.workflow()
+    async def test_endpoint(var1: str, var2: str) -> str:
+        return f"{var1}, {var2}!"
+
+    response = client.get("/endpoint/plums/deify")
+    assert response.status_code == 200
+    assert response.text == '"plums, deify!"'
+
+
+def test_stacked_decorators_step(dbos_fastapi: Tuple[DBOS, FastAPI]) -> None:
+    dbos, app = dbos_fastapi
+    client = TestClient(app)
+
+    @app.get("/endpoint/{var1}/{var2}")
+    @DBOS.step()
+    async def test_endpoint(var1: str, var2: str) -> str:
+        return f"{var1}, {var2}!"
+
+    response = client.get("/endpoint/plums/deify")
+    assert response.status_code == 200
+    assert response.text == '"plums, deify!"'

dbos-0.20.0a5/tests/test_package.py (new file, +92 lines)

import json
import os
import shutil
import signal
import subprocess
import tempfile
import time
import urllib.error
import urllib.request

import sqlalchemy as sa


def test_package(build_wheel: str, postgres_db_engine: sa.Engine) -> None:

    # Clean up the database from previous runs
    for template_name in ["dbos-db-starter", "dbos-app-starter"]:
        db_starter = template_name == "dbos-db-starter"
        app_db_name = template_name.replace("-", "_")
        with postgres_db_engine.connect() as connection:
            connection.execution_options(isolation_level="AUTOCOMMIT")
            connection.execute(sa.text(f"DROP DATABASE IF EXISTS {app_db_name}"))
            connection.execute(
                sa.text(f"DROP DATABASE IF EXISTS {app_db_name}_dbos_sys")
            )

        with tempfile.TemporaryDirectory() as temp_path:
            temp_path = tempfile.mkdtemp(prefix="dbos-")
            wheel_path = os.path.abspath(build_wheel)

            # Create a new virtual environment in the temp directory
            venv_path = os.path.join(temp_path, ".venv")
            if os.path.exists(venv_path):
                shutil.rmtree(venv_path)

            # To create a venv, we need the system Python executable. TODO: Don't hardcode the path.
            subprocess.check_call(
                [os.path.join("/", "usr", "bin", "python3"), "-m", "venv", venv_path]
            )

            venv = os.environ.copy()
            venv["PATH"] = f"{os.path.join(venv_path, 'bin')}:{venv['PATH']}"
            venv["VIRTUAL_ENV"] = venv_path

            # Install the dbos package into the virtual environment
            subprocess.check_call(
                ["pip", "install", wheel_path], cwd=temp_path, env=venv
            )

            # initalize the app with dbos scaffolding
            subprocess.check_call(
                ["dbos", "init", template_name, "--template", "dbos-db-starter"],
                cwd=temp_path,
                env=venv,
            )

            # Run schema migration
            subprocess.check_call(["dbos", "migrate"], cwd=temp_path, env=venv)

            # Launch the application in the virtual environment as a background process
            process = subprocess.Popen(["dbos", "start"], cwd=temp_path, env=venv)

            try:
                url = "http://localhost:8000"
                if db_starter:
                    url += "/greeting/dbos"
                max_retries = 10
                for attempt in range(max_retries):
                    try:
                        with urllib.request.urlopen(url, timeout=1) as response:
                            status_code = response.getcode()
                            assert status_code == 200
                            if db_starter:
                                response_data = response.read().decode("utf-8")
                                data = json.loads(response_data)
                                assert (
                                    data
                                    == "Greetings, dbos! You have been greeted 1 times."
                                )
                        break
                    except (urllib.error.URLError, AssertionError) as e:
                        if attempt < max_retries - 1:  # If not the last attempt
                            print(
                                f"Attempt {attempt + 1} failed: {e}. Retrying in 1 second..."
                            )
                            time.sleep(1)
                        else:
                            print(f"All {max_retries} attempts failed. Last error: {e}")
                            raise
            finally:
                os.kill(process.pid, signal.SIGINT)
                process.wait()

dbos-0.20.0a2/tests/test_package.py (removed, -84 lines)

import json
import os
import shutil
import signal
import subprocess
import tempfile
import time
import urllib.error
import urllib.request

import sqlalchemy as sa

# Public API
from dbos import load_config

# Private API because this is a unit test
pass


def test_package(build_wheel: str, postgres_db_engine: sa.Engine) -> None:

    # Clean up the database from previous runs
    app_db_name = "pkgtest"
    with postgres_db_engine.connect() as connection:
        connection.execution_options(isolation_level="AUTOCOMMIT")
        connection.execute(sa.text(f"DROP DATABASE IF EXISTS {app_db_name}"))
        connection.execute(sa.text(f"DROP DATABASE IF EXISTS {app_db_name}_dbos_sys"))

    with tempfile.TemporaryDirectory() as temp_path:
        temp_path = tempfile.mkdtemp(prefix="dbos-")
        wheel_path = os.path.abspath(build_wheel)

        # Create a new virtual environment in the temp directory
        venv_path = os.path.join(temp_path, ".venv")
        if os.path.exists(venv_path):
            shutil.rmtree(venv_path)

        # To create a venv, we need the system Python executable. TODO: Don't hardcode the path.
        subprocess.check_call(
            [os.path.join("/", "usr", "bin", "python3"), "-m", "venv", venv_path]
        )

        venv = os.environ.copy()
        venv["PATH"] = f"{os.path.join(venv_path, 'bin')}:{venv['PATH']}"
        venv["VIRTUAL_ENV"] = venv_path

        # Install the dbos package into the virtual environment
        subprocess.check_call(["pip", "install", wheel_path], cwd=temp_path, env=venv)

        # initalize the app with dbos scaffolding
        subprocess.check_call(
            ["dbos", "init", "pkgtest", "--template", "hello"], cwd=temp_path, env=venv
        )

        # Run schema migration
        subprocess.check_call(["dbos", "migrate"], cwd=temp_path, env=venv)

        # Launch the application in the virtual environment as a background process
        process = subprocess.Popen(["dbos", "start"], cwd=temp_path, env=venv)

        try:
            url = "http://localhost:8000/greeting/dbos"
            max_retries = 10
            for attempt in range(max_retries):
                try:
                    with urllib.request.urlopen(url, timeout=1) as response:
                        status_code = response.getcode()
                        assert status_code == 200
                        response_data = response.read().decode("utf-8")
                        data = json.loads(response_data)
                        assert data == "Greetings, dbos! You have been greeted 1 times."
                    break
                except (urllib.error.URLError, AssertionError) as e:
                    if attempt < max_retries - 1:  # If not the last attempt
                        print(
                            f"Attempt {attempt + 1} failed: {e}. Retrying in 1 second..."
                        )
                        time.sleep(1)
                    else:
                        print(f"All {max_retries} attempts failed. Last error: {e}")
                        raise
        finally:
            os.kill(process.pid, signal.SIGINT)
            process.wait()