llama-deploy-appserver 0.3.4__tar.gz → 0.3.6__tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (24)
  1. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/PKG-INFO +5 -4
  2. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/pyproject.toml +5 -4
  3. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/app.py +3 -3
  4. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/README.md +0 -0
  5. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/__init__.py +0 -0
  6. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/bootstrap.py +0 -0
  7. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/configure_logging.py +0 -0
  8. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/correlation_id.py +0 -0
  9. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/deployment.py +0 -0
  10. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/deployment_config_parser.py +0 -0
  11. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/interrupts.py +0 -0
  12. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/process_utils.py +0 -0
  13. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/py.typed +0 -0
  14. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/routers/__init__.py +0 -0
  15. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/routers/deployments.py +0 -0
  16. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/routers/status.py +0 -0
  17. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/routers/ui_proxy.py +0 -0
  18. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/settings.py +0 -0
  19. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/stats.py +0 -0
  20. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/types.py +0 -0
  21. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/workflow_loader.py +0 -0
  22. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/workflow_store/agent_data_store.py +0 -0
  23. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/workflow_store/keyed_lock.py +0 -0
  24. {llama_deploy_appserver-0.3.4 → llama_deploy_appserver-0.3.6}/src/llama_deploy/appserver/workflow_store/lru_cache.py +0 -0
PKG-INFO
@@ -1,16 +1,15 @@
 Metadata-Version: 2.3
 Name: llama-deploy-appserver
-Version: 0.3.4
+Version: 0.3.6
 Summary: Application server components for LlamaDeploy
 Author: Massimiliano Pippi, Adrian Lyjak
 Author-email: Massimiliano Pippi <mpippi@gmail.com>, Adrian Lyjak <adrianlyjak@gmail.com>
 License: MIT
-Requires-Dist: llama-index-workflows[server]>=2.2.0
+Requires-Dist: llama-index-workflows[server]>=2.3.0
 Requires-Dist: pydantic-settings>=2.10.1
-Requires-Dist: uvicorn>=0.24.0
 Requires-Dist: fastapi>=0.100.0
 Requires-Dist: websockets>=12.0
-Requires-Dist: llama-deploy-core>=0.3.4,<0.4.0
+Requires-Dist: llama-deploy-core>=0.3.6,<0.4.0
 Requires-Dist: httpx>=0.24.0,<1.0.0
 Requires-Dist: prometheus-fastapi-instrumentator>=7.1.0
 Requires-Dist: packaging>=25.0
@@ -18,6 +17,8 @@ Requires-Dist: structlog>=25.4.0
 Requires-Dist: rich>=14.1.0
 Requires-Dist: pyyaml>=6.0.2
 Requires-Dist: llama-cloud-services>=0.6.60
+Requires-Dist: watchfiles>=1.1.0
+Requires-Dist: uvicorn>=0.35.0
 Requires-Python: >=3.11, <4
 Description-Content-Type: text/markdown

pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "llama-deploy-appserver"
-version = "0.3.4"
+version = "0.3.6"
 description = "Application server components for LlamaDeploy"
 readme = "README.md"
 license = { text = "MIT" }
@@ -10,12 +10,11 @@ authors = [
 ]
 requires-python = ">=3.11, <4"
 dependencies = [
-    "llama-index-workflows[server]>=2.2.0",
+    "llama-index-workflows[server]>=2.3.0",
     "pydantic-settings>=2.10.1",
-    "uvicorn>=0.24.0",
     "fastapi>=0.100.0",
     "websockets>=12.0",
-    "llama-deploy-core>=0.3.4,<0.4.0",
+    "llama-deploy-core>=0.3.6,<0.4.0",
     "httpx>=0.24.0,<1.0.0",
     "prometheus-fastapi-instrumentator>=7.1.0",
     "packaging>=25.0",
@@ -23,6 +22,8 @@ dependencies = [
     "rich>=14.1.0",
     "pyyaml>=6.0.2",
     "llama-cloud-services>=0.6.60",
+    "watchfiles>=1.1.0",
+    "uvicorn>=0.35.0",
 ]

 [build-system]
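In the dependency list above, the old uvicorn pin (>=0.24.0) is dropped, uvicorn is re-added at >=0.35.0, and watchfiles>=1.1.0 is new. A plausible motivation, not stated in the diff, is uvicorn's auto-reload: when watchfiles is installed, uvicorn's reloader uses it to watch the source tree, so pinning it explicitly keeps reload=True behaving consistently. A minimal, hypothetical sketch of that interaction (host, port, and the __main__ guard are illustrative, not taken from the package):

import uvicorn

if __name__ == "__main__":
    uvicorn.run(
        "llama_deploy.appserver.app:app",  # app import string used later in this diff
        host="127.0.0.1",                  # example values only
        port=8000,
        reload=True,                       # file watching backed by watchfiles when installed
    )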
src/llama_deploy/appserver/app.py
@@ -40,7 +40,6 @@ from llama_deploy.appserver.workflow_loader import (
 )
 from llama_deploy.core.config import DEFAULT_DEPLOYMENT_FILE_PATH
 from prometheus_fastapi_instrumentator import Instrumentator
-from starlette.applications import Starlette
 from workflows.server import WorkflowServer

 from .deployment import Deployment
@@ -95,7 +94,7 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, Any]:

     apiserver_state.state("running")
     # terrible sad cludge
-    async with server._lifespan(cast(Starlette, {})):
+    async with server.contextmanager():
         yield

     apiserver_state.state("stopped")
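The lifespan change above removes a workaround: rather than entering the workflow server's startup/shutdown by calling the private _lifespan hook with an empty dict cast to a Starlette application, the code now uses the public contextmanager() API, which presumably is what requires bumping llama-index-workflows to >=2.3.0. A minimal sketch of the resulting pattern, assuming only the names visible in this diff (build_app is a hypothetical helper, not part of the package):

from contextlib import asynccontextmanager
from typing import Any, AsyncGenerator

from fastapi import FastAPI
from workflows.server import WorkflowServer


def build_app(server: WorkflowServer) -> FastAPI:
    @asynccontextmanager
    async def lifespan(app: FastAPI) -> AsyncGenerator[None, Any]:
        # Enter the workflow server's startup/shutdown through its public
        # async context manager instead of the private Starlette lifespan.
        async with server.contextmanager():
            yield  # requests are served while the context is open

    return FastAPI(lifespan=lifespan)

The remaining hunk below, also in src/llama_deploy/appserver/app.py, adjusts the uvicorn options used by start_server().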
@@ -208,12 +207,13 @@ def start_server(
     # Configure logging for the launcher process as well
     if configure_logging:
         setup_logging(os.getenv("LOG_LEVEL", "INFO"))
+
     uvicorn.run(
         "llama_deploy.appserver.app:app",
         host=settings.host,
         port=settings.port,
         reload=reload,
-        timeout_graceful_shutdown=60,
+        timeout_graceful_shutdown=1,
         access_log=False,
         log_config=None,
     )
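The last change drops timeout_graceful_shutdown from 60 to 1, so on shutdown uvicorn waits at most one second for in-flight connections before force-closing them, letting the server exit promptly instead of hanging for up to a minute. A small, self-contained example of the setting (app, host, and port are illustrative, not the package's defaults):

import uvicorn
from fastapi import FastAPI

app = FastAPI()

if __name__ == "__main__":
    # After SIGINT/SIGTERM, uvicorn waits up to timeout_graceful_shutdown
    # seconds for open connections, then closes them forcefully.
    uvicorn.run(app, host="127.0.0.1", port=8000, timeout_graceful_shutdown=1)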