llama-deploy-appserver 0.3.0a24__tar.gz → 0.3.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/PKG-INFO +2 -2
  2. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/pyproject.toml +2 -2
  3. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/deployment.py +6 -2
  4. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/README.md +0 -0
  5. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/__init__.py +0 -0
  6. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/app.py +0 -0
  7. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/bootstrap.py +0 -0
  8. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/configure_logging.py +0 -0
  9. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/correlation_id.py +0 -0
  10. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/deployment_config_parser.py +0 -0
  11. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/interrupts.py +0 -0
  12. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/process_utils.py +0 -0
  13. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/py.typed +0 -0
  14. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/routers/__init__.py +0 -0
  15. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/routers/deployments.py +0 -0
  16. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/routers/status.py +0 -0
  17. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/routers/ui_proxy.py +0 -0
  18. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/settings.py +0 -0
  19. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/stats.py +0 -0
  20. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/types.py +0 -0
  21. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/workflow_loader.py +0 -0
  22. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/workflow_store/agent_data_store.py +0 -0
  23. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/workflow_store/keyed_lock.py +0 -0
  24. {llama_deploy_appserver-0.3.0a24 → llama_deploy_appserver-0.3.2}/src/llama_deploy/appserver/workflow_store/lru_cache.py +0 -0
--- llama_deploy_appserver-0.3.0a24/PKG-INFO
+++ llama_deploy_appserver-0.3.2/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: llama-deploy-appserver
-Version: 0.3.0a24
+Version: 0.3.2
 Summary: Application server components for LlamaDeploy
 Author: Massimiliano Pippi, Adrian Lyjak
 Author-email: Massimiliano Pippi <mpippi@gmail.com>, Adrian Lyjak <adrianlyjak@gmail.com>
@@ -10,7 +10,7 @@ Requires-Dist: pydantic-settings>=2.10.1
 Requires-Dist: uvicorn>=0.24.0
 Requires-Dist: fastapi>=0.100.0
 Requires-Dist: websockets>=12.0
-Requires-Dist: llama-deploy-core>=0.3.0a24,<0.4.0
+Requires-Dist: llama-deploy-core>=0.3.2,<0.4.0
 Requires-Dist: httpx>=0.24.0,<1.0.0
 Requires-Dist: prometheus-fastapi-instrumentator>=7.1.0
 Requires-Dist: packaging>=25.0
--- llama_deploy_appserver-0.3.0a24/pyproject.toml
+++ llama_deploy_appserver-0.3.2/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "llama-deploy-appserver"
-version = "0.3.0a24"
+version = "0.3.2"
 description = "Application server components for LlamaDeploy"
 readme = "README.md"
 license = { text = "MIT" }
@@ -15,7 +15,7 @@ dependencies = [
     "uvicorn>=0.24.0",
     "fastapi>=0.100.0",
     "websockets>=12.0",
-    "llama-deploy-core>=0.3.0a24,<0.4.0",
+    "llama-deploy-core>=0.3.2,<0.4.0",
     "httpx>=0.24.0,<1.0.0",
     "prometheus-fastapi-instrumentator>=7.1.0",
     "packaging>=25.0",
--- llama_deploy_appserver-0.3.0a24/src/llama_deploy/appserver/deployment.py
+++ llama_deploy_appserver-0.3.2/src/llama_deploy/appserver/deployment.py
@@ -96,14 +96,18 @@ class Deployment:
     ) -> WorkflowServer:
         persistence = EmptyWorkflowStore()
         if settings.persistence == "local":
+            logger.info("Using local sqlite persistence for workflows")
             persistence = SqliteWorkflowStore(
                 settings.local_persistence_path or "workflows.db"
             )
         elif settings.persistence == "cloud" or (
             # default to cloud if api key is present to use
-            settings.persistence is None and os.getenv("LLAMA_DEPLOY_API_KEY")
+            settings.persistence is None and os.getenv("LLAMA_CLOUD_API_KEY")
         ):
+            logger.info("Using agent data cloud persistence for workflows")
             persistence = AgentDataStore(deployment_config, settings)
+        else:
+            logger.info("Not persisting workflows")
         server = WorkflowServer(workflow_store=persistence)
         for service_id, workflow in self._workflow_services.items():
             server.add_workflow(service_id, workflow)
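
This first deployment.py hunk does two things: it adds a log line for each persistence branch, and it changes the environment variable that triggers the implicit cloud default from LLAMA_DEPLOY_API_KEY to LLAMA_CLOUD_API_KEY. A minimal sketch of the resulting selection logic, with hypothetical stand-ins for the store classes and assuming settings.persistence is "local", "cloud", or unset:

import os
from typing import Literal, Optional

def select_store(persistence: Optional[Literal["local", "cloud"]],
                 local_path: Optional[str] = None) -> str:
    """Mirror of the patched branch: report which store would be used."""
    if persistence == "local":
        # SqliteWorkflowStore(settings.local_persistence_path or "workflows.db")
        return f"sqlite:{local_path or 'workflows.db'}"
    if persistence == "cloud" or (
        # After the fix, LLAMA_CLOUD_API_KEY (not LLAMA_DEPLOY_API_KEY)
        # is what defaults unset persistence to the cloud store.
        persistence is None and os.getenv("LLAMA_CLOUD_API_KEY")
    ):
        # AgentDataStore(deployment_config, settings)
        return "agent-data-cloud"
    # EmptyWorkflowStore(): workflows are not persisted.
    return "none"

assert select_store("local") == "sqlite:workflows.db"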
@@ -137,8 +141,8 @@ class Deployment:
         # be defensive since this is external and private
         server_debugger = getattr(server, "_assets_path", None)
         if isinstance(server_debugger, Path):
-            app.get(f"/deployments/{config.name}/debugger", include_in_schema=False)
 
+            @app.get(f"/deployments/{config.name}/debugger", include_in_schema=False)
             @app.get(f"/deployments/{config.name}/debugger/", include_in_schema=False)
             def redirect_to_debugger() -> RedirectResponse:
                 return RedirectResponse(
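
The second hunk fixes route registration for the debugger redirect: the old code called app.get(...) as a plain function instead of applying it as a decorator, so the path without the trailing slash was never registered. A standalone sketch of the corrected pattern (the deployment name and redirect target here are illustrative, not taken from the package):

from fastapi import FastAPI
from fastapi.responses import RedirectResponse

app = FastAPI()
name = "example"  # stand-in for config.name

# Stacking two @app.get decorators binds both paths to the same handler.
# app.get(...) only returns a decorator, so calling it without applying it
# to a function (the pre-fix code) registers nothing.
@app.get(f"/deployments/{name}/debugger", include_in_schema=False)
@app.get(f"/deployments/{name}/debugger/", include_in_schema=False)
def redirect_to_debugger() -> RedirectResponse:
    return RedirectResponse(url=f"/deployments/{name}/debugger/index.html")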