llama-deploy-appserver 0.2.7a1__py3-none-any.whl
This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- llama_deploy/appserver/__init__.py +0 -0
- llama_deploy/appserver/__main__.py +14 -0
- llama_deploy/appserver/app.py +49 -0
- llama_deploy/appserver/bootstrap.py +43 -0
- llama_deploy/appserver/client/__init__.py +3 -0
- llama_deploy/appserver/client/base.py +30 -0
- llama_deploy/appserver/client/client.py +49 -0
- llama_deploy/appserver/client/models/__init__.py +4 -0
- llama_deploy/appserver/client/models/apiserver.py +356 -0
- llama_deploy/appserver/client/models/model.py +82 -0
- llama_deploy/appserver/deployment.py +495 -0
- llama_deploy/appserver/deployment_config_parser.py +133 -0
- llama_deploy/appserver/routers/__init__.py +4 -0
- llama_deploy/appserver/routers/deployments.py +433 -0
- llama_deploy/appserver/routers/status.py +40 -0
- llama_deploy/appserver/run_autodeploy.py +141 -0
- llama_deploy/appserver/server.py +60 -0
- llama_deploy/appserver/settings.py +83 -0
- llama_deploy/appserver/source_managers/__init__.py +5 -0
- llama_deploy/appserver/source_managers/base.py +33 -0
- llama_deploy/appserver/source_managers/git.py +48 -0
- llama_deploy/appserver/source_managers/local.py +51 -0
- llama_deploy/appserver/stats.py +36 -0
- llama_deploy/appserver/tracing.py +237 -0
- llama_deploy/appserver/types.py +100 -0
- llama_deploy_appserver-0.2.7a1.dist-info/METADATA +23 -0
- llama_deploy_appserver-0.2.7a1.dist-info/RECORD +28 -0
- llama_deploy_appserver-0.2.7a1.dist-info/WHEEL +4 -0
llama_deploy/appserver/deployment.py
@@ -0,0 +1,495 @@
import asyncio
import importlib
import json
import logging
import os
import site
import subprocess
import sys
import tempfile
from asyncio.subprocess import Process
from multiprocessing.pool import ThreadPool
from pathlib import Path
from typing import Any, Tuple, Type

from dotenv import dotenv_values
from llama_deploy.appserver.source_managers.base import SyncPolicy
from llama_deploy.appserver.types import generate_id
from workflows import Context, Workflow
from workflows.handler import WorkflowHandler

from .deployment_config_parser import (
    DeploymentConfig,
    Service,
    SourceType,
)
from .source_managers import GitSourceManager, LocalSourceManager, SourceManager
from .stats import deployment_state, service_state

logger = logging.getLogger()
SOURCE_MANAGERS: dict[SourceType, Type[SourceManager]] = {
    SourceType.git: GitSourceManager,
    SourceType.local: LocalSourceManager,
}


class Client:
    # stub class
    pass


class DeploymentError(Exception): ...


class Deployment:
    def __init__(
        self,
        *,
        config: DeploymentConfig,
        base_path: Path,
        deployment_path: Path,
        local: bool = False,
    ) -> None:
        """Creates a Deployment instance.

        Args:
            config: The configuration object defining this deployment
            base_path: The path the deployment sources are resolved against
            deployment_path: The path on the filesystem used to store deployment data
            local: Whether the deployment is local. If true, sources won't be synced
        """
        self._local = local
        self._name = config.name
        self._base_path = base_path
        # If not local, isolate the deployment in a folder with the same name to avoid conflicts
        self._deployment_path = (
            deployment_path if local else deployment_path / config.name
        )
        self._client = Client()
        self._default_service: str | None = None
        self._running = False
        self._service_tasks: list[asyncio.Task] = []
        self._ui_server_process: Process | None = None
        # Ready to load services
        self._workflow_services: dict[str, Workflow] = self._load_services(config)
        self._contexts: dict[str, Context] = {}
        self._handlers: dict[str, WorkflowHandler] = {}
        self._handler_inputs: dict[str, str] = {}
        self._config = config
        deployment_state.labels(self._name).state("ready")

    @property
    def default_service(self) -> str:
        if not self._default_service:
            self._default_service = list(self._workflow_services.keys())[0]
        return self._default_service

    @property
    def client(self) -> Client:
        """Returns an async client to interact with this deployment."""
        return self._client

    @property
    def name(self) -> str:
        """Returns the name of this deployment."""
        return self._name

    @property
    def service_names(self) -> list[str]:
        """Returns the list of service names in this deployment."""
        return list(self._workflow_services.keys())

    async def run_workflow(
        self, service_id: str, session_id: str | None = None, **run_kwargs: dict
    ) -> Any:
        workflow = self._workflow_services[service_id]
        if session_id:
            context = self._contexts[session_id]
            return await workflow.run(context=context, **run_kwargs)

        if run_kwargs:
            return await workflow.run(**run_kwargs)

        return await workflow.run()

    def run_workflow_no_wait(
        self, service_id: str, session_id: str | None = None, **run_kwargs: dict
    ) -> Tuple[str, str]:
        workflow = self._workflow_services[service_id]
        if session_id:
            context = self._contexts[session_id]
            handler = workflow.run(context=context, **run_kwargs)
        else:
            handler = workflow.run(**run_kwargs)
            session_id = generate_id()
            self._contexts[session_id] = handler.ctx or Context(workflow)

        handler_id = generate_id()
        self._handlers[handler_id] = handler
        self._handler_inputs[handler_id] = json.dumps(run_kwargs)
        return handler_id, session_id

    async def start(self) -> None:
        """The task that will be launched in this deployment asyncio loop.

        This task is responsible for launching asyncio tasks for the core components and the services.
        All the tasks are gathered before returning.
        """
        self._running = True

        # UI
        if self._config.ui:
            await self._start_ui_server()

    async def reload(self, config: DeploymentConfig) -> None:
        # Reset default service, it might change across reloads
        self._default_service = None
        # Tear down the UI server
        self._stop_ui_server()
        # Reload the services
        self._workflow_services = self._load_services(config)

        # UI
        if self._config.ui:
            await self._start_ui_server()

    def _stop_ui_server(self) -> None:
        if self._ui_server_process is None:
            return

        self._ui_server_process.terminate()

    async def _start_ui_server(self) -> None:
        """Syncs the UI source, installs its dependencies, and starts the UI dev server."""
        if not self._config.ui:
            raise ValueError("missing ui configuration settings")

        source = self._config.ui.source
        if source is None:
            raise ValueError("source must be defined")

        # Sync the service source
        destination = self._deployment_path.resolve()
        source_manager = SOURCE_MANAGERS[source.type](self._config, self._base_path)
        policy = source.sync_policy or (
            SyncPolicy.SKIP if self._local else SyncPolicy.REPLACE
        )
        source_manager.sync(source.location, str(destination), policy)
        installed_path = destination / source_manager.relative_path(source.location)

        install = await asyncio.create_subprocess_exec(
            "pnpm", "install", cwd=installed_path
        )
        await install.wait()

        env = os.environ.copy()
        # TODO - delete me later once templates refactored to not depend on these
        env["LLAMA_DEPLOY_NEXTJS_BASE_PATH"] = f"/deployments/{self._config.name}/ui"
        env["LLAMA_DEPLOY_NEXTJS_DEPLOYMENT_NAME"] = self._config.name
        # END TODO
        # Note! Cloud Llama Deploy also sets a LLAMA_DEPLOY_DEPLOYMENT_NAME, which _must_ be undefined when running locally, otherwise,
        # the UI will make assumptions that it is in a deployed environment. If we configure the templates to check LLAMA_DEPLOY_IS_DEPLOYED instead, then
        # we can just always define LLAMA_DEPLOY_DEPLOYMENT_NAME to keep things simple
        env["LLAMA_DEPLOY_DEPLOYMENT_URL_ID"] = self._config.name
        env["LLAMA_DEPLOY_DEPLOYMENT_BASE_PATH"] = (
            f"/deployments/{self._config.name}/ui"
        )
        # Override PORT and force using the one from the deployment.yaml file
        env["PORT"] = str(self._config.ui.port)

        self._ui_server_process = await asyncio.create_subprocess_exec(
            "pnpm",
            "run",
            "dev",
            cwd=installed_path,
            env=env,
        )

        print(f"Started Next.js app with PID {self._ui_server_process.pid}")

    def _load_services(self, config: DeploymentConfig) -> dict[str, Workflow]:
        """Creates WorkflowService instances according to the configuration object."""
        deployment_state.labels(self._name).state("loading_services")
        workflow_services = {}
        for service_id, service_config in config.services.items():
            service_state.labels(self._name, service_id).state("loading")
            source = service_config.source
            if source is None:
                # this is a default service, skip for now
                # TODO: check the service name is valid and supported
                # TODO: possibly start the default service if not running already
                continue

            if service_config.import_path is None:
                msg = "path field in service definition must be set"
                raise ValueError(msg)

            # Sync the service source
            service_state.labels(self._name, service_id).state("syncing")
            destination = self._deployment_path.resolve()
            source_manager = SOURCE_MANAGERS[source.type](config, self._base_path)
            policy = SyncPolicy.SKIP if self._local else SyncPolicy.REPLACE
            source_manager.sync(source.location, str(destination), policy)

            # Install dependencies
            service_state.labels(self._name, service_id).state("installing")
            self._install_dependencies(service_config, destination)

            # Set environment variables
            self._set_environment_variables(service_config, destination)

            # Search for a workflow instance in the service path
            module_path_str, workflow_name = service_config.import_path.split(":")
            module_path = Path(module_path_str)
            module_name = module_path.name
            pythonpath = (destination / module_path.parent).resolve()
            logger.debug("Extending PYTHONPATH to %s", pythonpath)
            sys.path.append(str(pythonpath))

            module = importlib.import_module(module_name)
            workflow_services[service_id] = getattr(module, workflow_name)

            service_state.labels(self._name, service_id).state("ready")

        if config.default_service:
            if config.default_service in workflow_services:
                self._default_service = config.default_service
            else:
                msg = f"Service with id '{config.default_service}' does not exist, cannot set it as default."
                logger.warning(msg)
                self._default_service = None

        return workflow_services

    @staticmethod
    def _validate_path_is_safe(
        path: str, source_root: Path, path_type: str = "path"
    ) -> None:
        """Validates that a path is within the source root to prevent path traversal attacks.

        Args:
            path: The path to validate
            source_root: The root directory that paths should be relative to
            path_type: Description of the path type for error messages

        Raises:
            DeploymentError: If the path is outside the source root
        """
        resolved_path = (source_root / path).resolve()
        resolved_source_root = source_root.resolve()

        if not resolved_path.is_relative_to(resolved_source_root):
            msg = f"{path_type} {path} is not a subdirectory of the source root {source_root}"
            raise DeploymentError(msg)

    @staticmethod
    def _set_environment_variables(
        service_config: Service, root: Path | None = None
    ) -> None:
        """Sets environment variables for the service."""
        env_vars: dict[str, str | None] = {}

        if service_config.env:
            env_vars.update(**service_config.env)

        if service_config.env_files:
            for env_file in service_config.env_files:
                # use dotenv to parse env_file
                env_file_path = root / env_file if root else Path(env_file)
                env_vars.update(**dotenv_values(env_file_path))

        for k, v in env_vars.items():
            if v:
                os.environ[k] = v

    @staticmethod
    def _install_dependencies(service_config: Service, source_root: Path) -> None:
        """Installs the items listed under `python-dependencies` in the service configuration with `uv pip install`, bootstrapping uv via pip if needed."""
        if not service_config.python_dependencies:
            return
        install_args = []
        for dep in service_config.python_dependencies or []:
            if dep.endswith("requirements.txt"):
                Deployment._validate_path_is_safe(dep, source_root, "requirements file")
                resolved_dep = source_root / dep
                install_args.extend(["-r", str(resolved_dep)])
            else:
                if "." in dep or "/" in dep:
                    Deployment._validate_path_is_safe(
                        dep, source_root, "dependency path"
                    )
                    resolved_dep = source_root / dep
                    if os.path.isfile(resolved_dep) or os.path.isdir(resolved_dep):
                        # install as editable, such that sources are left in place, and can reference repository files
                        install_args.extend(["-e", str(resolved_dep.resolve())])
                    else:
                        install_args.append(dep)
                else:
                    install_args.append(dep)

        # Check if uv is available on the path
        uv_available = False
        try:
            subprocess.check_call(
                ["uv", "--version"],
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
            )
            uv_available = True
        except (subprocess.CalledProcessError, FileNotFoundError):
            pass
        if not uv_available:
            # bootstrap uv with pip
            try:
                subprocess.check_call(
                    [
                        sys.executable,
                        "-m",
                        "pip",
                        "install",
                        "uv",
                    ]
                )
            except subprocess.CalledProcessError as e:
                msg = f"Unable to install uv. Environment must include uv, or uv must be installed with pip: {e.stderr}"
                raise DeploymentError(msg)

        # Bit of an ugly hack, install to whatever python environment we're currently in
        # Find the python bin path and get its parent dir, and install into whatever that
        # python is. Hopefully we're in a container or a venv, otherwise this is installing to
        # the system python
        # https://docs.astral.sh/uv/concepts/projects/config/#project-environment-path
        python_bin_path = os.path.dirname(sys.executable)
        python_parent_dir = os.path.dirname(python_bin_path)
        if install_args:
            try:
                subprocess.check_call(
                    [
                        "uv",
                        "pip",
                        "install",
                        f"--prefix={python_parent_dir}",  # installs to the current python environment
                        *install_args,
                    ],
                    cwd=source_root,
                )

                # Force Python to refresh its package discovery after installing new packages
                site.main()  # Refresh site-packages paths
                # Clear import caches to ensure newly installed packages are discoverable
                importlib.invalidate_caches()

            except subprocess.CalledProcessError as e:
                msg = f"Unable to install service dependencies using command '{e.cmd}': {e.stderr}"
                raise DeploymentError(msg) from None


class Manager:
    """The Manager orchestrates deployments and their runtime.

    Usage example:
    ```python
    config = DeploymentConfig.from_yaml(data_path / "git_service.yaml")
    manager = Manager()
    manager.set_deployments_path(tmp_path)
    t = threading.Thread(target=asyncio.run, args=(manager.serve(),))
    t.start()
    asyncio.run(manager.deploy(config, base_path=str(data_path)))
    t.join()
    ```
    """

    def __init__(self, max_deployments: int = 10) -> None:
        """Creates a Manager instance.

        Args:
            max_deployments: The maximum number of deployments supported by this manager.
        """
        self._deployments: dict[str, Deployment] = {}
        self._deployments_path: Path | None = None
        self._max_deployments = max_deployments
        self._pool = ThreadPool(processes=max_deployments)
        self._last_control_plane_port = 8002
        self._simple_message_queue_server: asyncio.Task | None = None
        self._serving = False

    @property
    def deployment_names(self) -> list[str]:
        """Return a list of names for the active deployments."""
        return list(self._deployments.keys())

    @property
    def deployments_path(self) -> Path:
        if self._deployments_path is None:
            raise ValueError("Deployments path not set")
        return self._deployments_path

    def set_deployments_path(self, path: Path | None) -> None:
        self._deployments_path = (
            path or Path(tempfile.gettempdir()) / "llama_deploy" / "deployments"
        )

    def get_deployment(self, deployment_name: str) -> Deployment | None:
        return self._deployments.get(deployment_name)

    async def serve(self) -> None:
        """The server loop; it keeps the manager running."""
        if self._deployments_path is None:
            raise RuntimeError("Deployments path not set")

        self._serving = True

        event = asyncio.Event()
        try:
            # Waits indefinitely since `event` will never be set
            await event.wait()
        except asyncio.CancelledError:
            if self._simple_message_queue_server is not None:
                self._simple_message_queue_server.cancel()
                await self._simple_message_queue_server

    async def deploy(
        self,
        config: DeploymentConfig,
        base_path: str,
        reload: bool = False,
        local: bool = False,
    ) -> None:
        """Creates a Deployment instance and starts the relative runtime.

        Args:
            config: The deployment configuration.
            base_path: The base path used to resolve the deployment sources.
            reload: Reload an existing deployment instead of raising an error.
            local: Deploy a local configuration. Source code will be used in place locally.

        Raises:
            ValueError: If a deployment with the same name already exists or the maximum number of deployments is exceeded.
            DeploymentError: If it wasn't possible to create a deployment.
        """
        if not self._serving:
            raise RuntimeError("Manager main loop not started, call serve() first.")

        if not reload:
            # Raise an error if deployment already exists
            if config.name in self._deployments:
                msg = f"Deployment already exists: {config.name}"
                raise ValueError(msg)

            # Raise an error if we can't create any new deployment
            if len(self._deployments) == self._max_deployments:
                msg = "Reached the maximum number of deployments, cannot schedule more"
                raise ValueError(msg)

            deployment = Deployment(
                config=config,
                base_path=Path(base_path),
                deployment_path=self.deployments_path,
                local=local,
            )
            self._deployments[config.name] = deployment
            await deployment.start()
        else:
            if config.name not in self._deployments:
                msg = f"Cannot find deployment to reload: {config.name}"
                raise ValueError(msg)

            deployment = self._deployments[config.name]
            await deployment.reload(config)
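For orientation, the following is a minimal sketch of how the classes above might be driven from application code, mirroring the pattern in the `Manager` docstring: run `serve()` on a background event loop, deploy a parsed `DeploymentConfig`, then run a workflow through the resulting `Deployment`. The config file name, the base path, the sleep, and the `message` keyword argument are illustrative assumptions, not values shipped with the package.

```python
# Minimal usage sketch -- file names, base path, and run kwargs are assumptions.
import asyncio
import threading
import time
from pathlib import Path

from llama_deploy.appserver.deployment import Manager
from llama_deploy.appserver.deployment_config_parser import DeploymentConfig

config = DeploymentConfig.from_yaml(Path("deployment.yaml"))  # hypothetical config file

manager = Manager()
manager.set_deployments_path(None)  # falls back to the tempdir default

# serve() blocks until cancelled, so run it on a background event loop
server = threading.Thread(target=asyncio.run, args=(manager.serve(),), daemon=True)
server.start()
time.sleep(0.5)  # crude: give serve() a moment to mark the manager as serving

# Deploy the configuration; sources are resolved relative to base_path
asyncio.run(manager.deploy(config, base_path=".", local=True))

deployment = manager.get_deployment(config.name)
if deployment is not None:
    # Run the default workflow service once; the keyword argument is assumed
    result = asyncio.run(
        deployment.run_workflow(deployment.default_service, message="hello")
    )
    print(result)
```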
llama_deploy/appserver/deployment_config_parser.py
@@ -0,0 +1,133 @@
import sys
import warnings
from enum import Enum
from pathlib import Path
from typing import Any, Optional

if sys.version_info >= (3, 11):
    from typing import Self
else:  # pragma: no cover
    from typing_extensions import Self

import yaml
from pydantic import BaseModel, ConfigDict, Field, model_validator


class SourceType(str, Enum):
    """Supported types for the `Service.source` parameter."""

    git = "git"
    docker = "docker"
    local = "local"


class SyncPolicy(Enum):
    """Define the sync behaviour in case the destination target exists."""

    REPLACE = "replace"
    MERGE = "merge"
    SKIP = "skip"
    FAIL = "fail"


class ServiceSource(BaseModel):
    """Configuration for the `source` parameter of a service."""

    type: SourceType
    location: str
    sync_policy: Optional[SyncPolicy] = None

    @model_validator(mode="before")
    @classmethod
    def handle_deprecated_fields(cls, data: Any) -> Any:
        if isinstance(data, dict):
            if "name" in data and "location" not in data:  # pragma: no cover
                warnings.warn(
                    "The 'name' field is deprecated. Use 'location' instead.",
                    DeprecationWarning,
                )
                data["location"] = data["name"]
        return data


class Service(BaseModel):
    """Configuration for a single service."""

    name: str
    source: ServiceSource
    import_path: str | None = Field(None)
    host: str | None = None
    port: int | None = None
    env: dict[str, str] | None = Field(None)
    env_files: list[str] | None = Field(None)
    python_dependencies: list[str] | None = Field(None)
    ts_dependencies: dict[str, str] | None = Field(None)

    @model_validator(mode="before")
    @classmethod
    def validate_fields(cls, data: Any) -> Any:
        if isinstance(data, dict):
            if "path" in data and "import-path" not in data:  # pragma: no cover
                warnings.warn(
                    "The 'path' field is deprecated. Use 'import-path' instead.",
                    DeprecationWarning,
                )
                data["import-path"] = data["path"]

            # Handle YAML aliases
            if "import-path" in data:
                data["import_path"] = data.pop("import-path")
            if "env-files" in data:
                data["env_files"] = data.pop("env-files")
            if "python-dependencies" in data:
                data["python_dependencies"] = data.pop("python-dependencies")
            if "ts-dependencies" in data:
                data["ts_dependencies"] = data.pop("ts-dependencies")

        return data


class UIService(Service):
    port: int | None = Field(
        default=3000,
        description="The TCP port to use for the nextjs server",
    )


class DeploymentConfig(BaseModel):
    """Model definition mapping a deployment config file."""

    model_config = ConfigDict(populate_by_name=True, extra="ignore")

    name: str
    default_service: str | None = Field(None)
    services: dict[str, Service]
    ui: UIService | None = None

    @model_validator(mode="before")
    @classmethod
    def validate_fields(cls, data: Any) -> Any:
        # Handle YAML aliases
        if isinstance(data, dict):
            if "control-plane" in data:
                data["control_plane"] = data.pop("control-plane")
            if "message-queue" in data:
                data["message_queue"] = data.pop("message-queue")
            if "default-service" in data:
                data["default_service"] = data.pop("default-service")

        return data

    @classmethod
    def from_yaml_bytes(cls, src: bytes) -> Self:
        """Read config data from bytes containing yaml code."""
        config = yaml.safe_load(src) or {}
        return cls(**config)

    @classmethod
    def from_yaml(cls, path: Path) -> Self:
        """Read config data from a yaml file."""
        with open(path, "r") as yaml_file:
            config = yaml.safe_load(yaml_file) or {}

        return cls(**config)
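To make the parser concrete, here is a small sketch of a config it should accept, fed through `from_yaml_bytes`. The deployment name, service names, locations, and environment values are invented for illustration; the hyphenated keys are the YAML aliases the model validators above normalize to the snake_case field names.

```python
# Illustrative config only -- every name, path, and value below is an assumption.
from llama_deploy.appserver.deployment_config_parser import DeploymentConfig

RAW = b"""
name: example-deployment
default-service: echo
services:
  echo:
    name: Echo Workflow
    source:
      type: local
      location: ./src
    import-path: ./src/workflow:echo_workflow
    env:
      LOG_LEVEL: debug
    python-dependencies:
      - requirements.txt
ui:
  name: Example UI
  source:
    type: local
    location: ./ui
  port: 3000
"""

config = DeploymentConfig.from_yaml_bytes(RAW)
print(config.name)                          # example-deployment
print(config.default_service)               # echo ("default-service" is normalized)
print(config.services["echo"].import_path)  # ./src/workflow:echo_workflow
print(config.ui.port if config.ui else None)  # 3000
```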