llama-deploy-core 0.3.0a15__py3-none-any.whl → 0.3.0a17__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- llama_deploy/core/deployment_config.py +19 -0
- llama_deploy/core/server/manage_api/_create_deployments_router.py +7 -4
- {llama_deploy_core-0.3.0a15.dist-info → llama_deploy_core-0.3.0a17.dist-info}/METADATA +1 -1
- {llama_deploy_core-0.3.0a15.dist-info → llama_deploy_core-0.3.0a17.dist-info}/RECORD +5 -5
- {llama_deploy_core-0.3.0a15.dist-info → llama_deploy_core-0.3.0a17.dist-info}/WHEEL +0 -0

llama_deploy/core/deployment_config.py

@@ -6,12 +6,26 @@ from pathlib import Path
 from typing import Any, TypeVar

 import yaml
+from llama_deploy.core.git.git_util import get_git_root, is_git_repo
 from llama_deploy.core.path_util import validate_path_traversal
 from pydantic import BaseModel, ConfigDict, Field, ValidationError, model_validator

 DEFAULT_DEPLOYMENT_NAME = "default"


+def read_deployment_config_from_git_root_or_cwd(
+    cwd: Path, config_path: Path
+) -> "DeploymentConfig":
+    """
+    Read the deployment config from the git root or cwd.
+    """
+    if is_git_repo():
+        git_root = get_git_root()
+        relative_cwd_path = cwd.relative_to(git_root)
+        return read_deployment_config(git_root, relative_cwd_path / config_path)
+    return read_deployment_config(cwd, config_path)
+
+
 def read_deployment_config(source_root: Path, config_path: Path) -> "DeploymentConfig":
     """
     Read the deployment config from the config directory.
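
A minimal usage sketch of the helper added above, assuming the caller passes its own working directory and config path; the filename "llama_deploy.yaml" is a placeholder, not taken from this diff:

from pathlib import Path

from llama_deploy.core.deployment_config import read_deployment_config_from_git_root_or_cwd

# Inside a git checkout the config is resolved against the repository root;
# outside a repository it falls back to the current working directory.
config = read_deployment_config_from_git_root_or_cwd(
    cwd=Path.cwd(),
    config_path=Path("llama_deploy.yaml"),  # placeholder filename
)
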
@@ -160,6 +174,10 @@ class DeploymentConfig(BaseModel):
         default=DEFAULT_DEPLOYMENT_NAME,
         description="The url safe path name of the deployment.",
     )
+    llama_cloud: bool = Field(
+        default=False,
+        description="If true, serving locally expects Llama Cloud access and will inject credentials when possible.",
+    )
     app: str | None = Field(
         None,
         description="A full bundle of all workflows as an 'app'. \"path.to_import:app_name\"",
@@ -186,6 +204,7 @@ class DeploymentConfig(BaseModel):

         return DeploymentConfig(
             name=_pick_non_default(self.name, config.name, "default"),
+            llama_cloud=self.llama_cloud or config.llama_cloud,
             app=self.app or config.app,
             workflows={**self.workflows, **config.workflows},
             env_files=list(set(self.env_files + config.env_files)),
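
A rough illustration of the new llama_cloud flag and its OR-merge, assuming the model's remaining fields all carry defaults (as the workflows and env_files merge lines suggest); the name of the surrounding merge method is not visible in this hunk:

from llama_deploy.core.deployment_config import DeploymentConfig

# llama_cloud defaults to False, so existing configs are unaffected.
base = DeploymentConfig(name="default")
override = DeploymentConfig(name="default", llama_cloud=True)

# Per the merge hunk above, the combined config uses
# `self.llama_cloud or config.llama_cloud`, so either side can opt in.
assert (base.llama_cloud or override.llama_cloud) is True
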
llama_deploy/core/server/manage_api/_create_deployments_router.py

@@ -23,18 +23,21 @@ async def get_project_id(project_id: Annotated[str, Query()]) -> str:
 def create_v1beta1_deployments_router(
     deployments_service: AbstractDeploymentsService,
     public_service: AbstractPublicDeploymentsService,
-    get_project_id: Callable[
-    dependencies: list[params.Depends] =
-    public_dependencies: list[params.Depends] =
+    get_project_id: Callable[..., Awaitable[str]] = get_project_id,
+    dependencies: list[params.Depends] | None = None,
+    public_dependencies: list[params.Depends] | None = None,
+    include_in_schema: bool = True,
 ) -> APIRouter:
-    base_router = APIRouter(prefix="/api/v1beta1")
+    base_router = APIRouter(prefix="/api/v1beta1", include_in_schema=include_in_schema)
     public_router = APIRouter(
         tags=["v1beta1-deployments-public"],
         dependencies=public_dependencies,
+        include_in_schema=include_in_schema,
     )
     router = APIRouter(
         tags=["v1beta1-deployments"],
         dependencies=dependencies,
+        include_in_schema=include_in_schema,
     )

     @public_router.get("/version")
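
A hedged sketch of wiring the router with the new include_in_schema flag; the service objects below are stand-ins for concrete implementations that are not part of this diff, and the function is imported from its private module rather than any re-export:

from fastapi import FastAPI

from llama_deploy.core.server.manage_api._create_deployments_router import (
    create_v1beta1_deployments_router,
)

deployments_service = ...  # stand-in for an AbstractDeploymentsService implementation
public_service = ...       # stand-in for an AbstractPublicDeploymentsService implementation

# include_in_schema=False keeps the /api/v1beta1 routes callable but hides
# them from the generated OpenAPI document.
router = create_v1beta1_deployments_router(
    deployments_service=deployments_service,
    public_service=public_service,
    include_in_schema=False,
)

app = FastAPI()
app.include_router(router)
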
{llama_deploy_core-0.3.0a15.dist-info → llama_deploy_core-0.3.0a17.dist-info}/RECORD

@@ -1,7 +1,7 @@
 llama_deploy/core/__init__.py,sha256=112612bf2e928c2e0310d6556bb13fc28c00db70297b90a8527486cd2562e408,43
 llama_deploy/core/client/manage_client.py,sha256=0f2f63c0d5ba657580af758edd5c5f07602061d7ae13a5964ff82b71127d8324,8542
 llama_deploy/core/config.py,sha256=69bb0ea8ac169eaa4e808cd60a098b616bddd3145d26c6c35e56db38496b0e6a,35
-llama_deploy/core/deployment_config.py,sha256=
+llama_deploy/core/deployment_config.py,sha256=bde431070758421f578f2e27f006152147e8cd752ee1054f1bf7c37ca95b0b38,15853
 llama_deploy/core/git/git_util.py,sha256=c581c1da13871b4e89eda58f56ddb074139454c06ae9b04c0b396fdb2b9a5176,9193
 llama_deploy/core/path_util.py,sha256=14d50c0c337c8450ed46cafc88436027056b365a48370a69cdb76c88d7c26fd1,798
 llama_deploy/core/py.typed,sha256=e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855,0
@@ -13,9 +13,9 @@ llama_deploy/core/schema/projects.py,sha256=726f91e90ff8699c90861d9740819c44c3f0
 llama_deploy/core/schema/public.py,sha256=022129c8fc09192f5e503b0500ccf54d106f5712b9cf8ce84b3b1c37e186f930,147
 llama_deploy/core/server/manage_api/__init__.py,sha256=e477ccab59cfd084edbad46f209972a282e623eb314d0847a754a46a16361db5,457
 llama_deploy/core/server/manage_api/_abstract_deployments_service.py,sha256=85ceab2a343c3642db7f77d4a665d5710a14bca920bbfdc25c5f1168cce30b22,4638
-llama_deploy/core/server/manage_api/_create_deployments_router.py,sha256=
+llama_deploy/core/server/manage_api/_create_deployments_router.py,sha256=9bc8468169445e1cc7f2a479e1c7da42b4bdd7482fa3b440e03ee49cd09a75df,6801
 llama_deploy/core/server/manage_api/_exceptions.py,sha256=ee71cd9c2354a665e6905cd9cc752d2d65f71f0b936d33fec3c1c5229c38accf,246
 llama_deploy/core/ui_build.py,sha256=290dafa951918e5593b9035570fa4c66791d7e5ea785bd372ad11e99e8283857,1514
-llama_deploy_core-0.3.
-llama_deploy_core-0.3.
-llama_deploy_core-0.3.
+llama_deploy_core-0.3.0a17.dist-info/WHEEL,sha256=66530aef82d5020ef5af27ae0123c71abb9261377c5bc519376c671346b12918,79
+llama_deploy_core-0.3.0a17.dist-info/METADATA,sha256=85c28a4082b785d8eef56e19c9c2e1598c0e8a08c1ed812dde622bb93828cb16,659
+llama_deploy_core-0.3.0a17.dist-info/RECORD,,
{llama_deploy_core-0.3.0a15.dist-info → llama_deploy_core-0.3.0a17.dist-info}/WHEEL

File without changes