llama-deploy-core 0.3.11__py3-none-any.whl → 0.3.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- llama_deploy/core/client/manage_client.py
+++ llama_deploy/core/client/manage_client.py
@@ -4,6 +4,7 @@ from contextlib import asynccontextmanager
 from typing import AsyncIterator, Callable, List
 
 import httpx
+from llama_deploy.core.client.ssl_util import get_httpx_verify_param
 from llama_deploy.core.schema import LogEvent
 from llama_deploy.core.schema.deployments import (
     DeploymentCreate,
@@ -30,13 +31,15 @@ class BaseClient:
         if api_key:
             headers["Authorization"] = f"Bearer {api_key}"
 
+        verify = get_httpx_verify_param()
         self.client = httpx.AsyncClient(
             base_url=self.base_url,
             headers=headers,
             auth=auth,
+            verify=verify,
         )
         self.hookless_client = httpx.AsyncClient(
-            base_url=self.base_url, headers=headers, auth=auth
+            base_url=self.base_url, headers=headers, auth=auth, verify=verify
         )
 
     async def aclose(self) -> None:
--- /dev/null
+++ llama_deploy/core/client/ssl_util.py
@@ -0,0 +1,32 @@
+"""Utility functions for SSL/TLS configuration with optional truststore support."""
+
+from __future__ import annotations
+
+import os
+import ssl
+from typing import Any
+
+import truststore
+
+
+def get_ssl_context() -> ssl.SSLContext | bool:
+    """Get SSL context for httpx clients.
+
+    Returns an SSL context using truststore if LLAMA_DEPLOY_USE_TRUSTSTORE is set,
+    otherwise returns True (default SSL verification).
+
+    Truststore allows Python to use the system certificate store, which is useful
+    for corporate environments with MITM proxies.
+    """
+    if os.getenv("LLAMA_DEPLOY_USE_TRUSTSTORE", "").lower() in ("1", "true", "yes"):
+        return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+    return True
+
+
+def get_httpx_verify_param() -> Any:
+    """Get the verify parameter for httpx clients.
+
+    Returns an SSL context using truststore if configured, otherwise returns True.
+    This can be passed directly to httpx.Client/AsyncClient's verify parameter.
+    """
+    return get_ssl_context()
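
In practice this means opting into the operating-system certificate store is a single environment variable, read each time a client is constructed. A minimal usage sketch, assuming llama-deploy-core 0.3.12 is installed; the env var, the ssl_util helpers, and httpx's verify parameter come from the diff above, while the base URL below is only a placeholder:

# Sketch: enabling the system trust store before building an httpx client.
import os

os.environ["LLAMA_DEPLOY_USE_TRUSTSTORE"] = "1"  # "1", "true", or "yes" enable truststore

import httpx
from llama_deploy.core.client.ssl_util import get_httpx_verify_param

verify = get_httpx_verify_param()  # truststore.SSLContext when enabled, True (httpx default) otherwise
client = httpx.AsyncClient(base_url="https://deploy.example.invalid", verify=verify)  # placeholder URL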
--- llama_deploy_core-0.3.11.dist-info/METADATA
+++ llama_deploy_core-0.3.12.dist-info/METADATA
@@ -1,12 +1,13 @@
 Metadata-Version: 2.3
 Name: llama-deploy-core
-Version: 0.3.11
+Version: 0.3.12
 Summary: Core models and schemas for LlamaDeploy
 License: MIT
 Requires-Dist: fastapi>=0.115.0
 Requires-Dist: overrides>=7.7.0
 Requires-Dist: pydantic>=2.0.0
 Requires-Dist: pyyaml>=6.0.2
+Requires-Dist: truststore>=0.10.4
 Requires-Dist: types-pyyaml>=6.0.12.20250822
 Requires-Dist: httpx>=0.24.0,<1.0.0 ; extra == 'client'
 Requires-Dist: fastapi>=0.115.0 ; extra == 'server'
--- llama_deploy_core-0.3.11.dist-info/RECORD
+++ llama_deploy_core-0.3.12.dist-info/RECORD
@@ -1,5 +1,6 @@
 llama_deploy/core/__init__.py,sha256=112612bf2e928c2e0310d6556bb13fc28c00db70297b90a8527486cd2562e408,43
-llama_deploy/core/client/manage_client.py,sha256=b54b276519247279c02cd70e0aa965a06cd574b4e3a808e219c40fa8a2d940fe,9326
+llama_deploy/core/client/manage_client.py,sha256=944f565d8e9397e76e94b032d91b818924dc5b5b4c5859eb78e20d670707de00,9479
+llama_deploy/core/client/ssl_util.py,sha256=b9743dc828fa27c18ba0867b1348662cdf0d855965c5a33db63505f23eef5d7b,1010
 llama_deploy/core/config.py,sha256=69bb0ea8ac169eaa4e808cd60a098b616bddd3145d26c6c35e56db38496b0e6a,35
 llama_deploy/core/deployment_config.py,sha256=bde431070758421f578f2e27f006152147e8cd752ee1054f1bf7c37ca95b0b38,15853
 llama_deploy/core/git/git_util.py,sha256=e62a5479c619a5973de203ebcc56b9729b5060c48fcb9cfc2e442756716c2abf,10960
@@ -17,6 +18,6 @@ llama_deploy/core/server/manage_api/_abstract_deployments_service.py,sha256=85ce
 llama_deploy/core/server/manage_api/_create_deployments_router.py,sha256=9bc8468169445e1cc7f2a479e1c7da42b4bdd7482fa3b440e03ee49cd09a75df,6801
 llama_deploy/core/server/manage_api/_exceptions.py,sha256=ee71cd9c2354a665e6905cd9cc752d2d65f71f0b936d33fec3c1c5229c38accf,246
 llama_deploy/core/ui_build.py,sha256=290dafa951918e5593b9035570fa4c66791d7e5ea785bd372ad11e99e8283857,1514
-llama_deploy_core-0.3.11.dist-info/WHEEL,sha256=66530aef82d5020ef5af27ae0123c71abb9261377c5bc519376c671346b12918,79
-llama_deploy_core-0.3.11.dist-info/METADATA,sha256=bea5d9eef1ef1fa8f4a875f6707e8c2ef08cd3cafee54a3ecd21251318ec1889,664
-llama_deploy_core-0.3.11.dist-info/RECORD,,
+llama_deploy_core-0.3.12.dist-info/WHEEL,sha256=66530aef82d5020ef5af27ae0123c71abb9261377c5bc519376c671346b12918,79
+llama_deploy_core-0.3.12.dist-info/METADATA,sha256=4a2b2d5d8204fbef0e58ea396b2d3474d01700d4e06b182cb6cb5f6a234f0d6f,698
+llama_deploy_core-0.3.12.dist-info/RECORD,,