llama-deploy-core 0.2.7a1__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,230 @@
1
+ from __future__ import annotations
2
+
3
+ from contextlib import asynccontextmanager
4
+ from typing import AsyncIterator, Callable, List
5
+
6
+ import httpx
7
+ from llama_deploy.core.schema import LogEvent
8
+ from llama_deploy.core.schema.deployments import (
9
+ DeploymentCreate,
10
+ DeploymentResponse,
11
+ DeploymentsListResponse,
12
+ DeploymentUpdate,
13
+ )
14
+ from llama_deploy.core.schema.git_validation import (
15
+ RepositoryValidationRequest,
16
+ RepositoryValidationResponse,
17
+ )
18
+ from llama_deploy.core.schema.projects import ProjectsListResponse, ProjectSummary
19
+ from llama_deploy.core.schema.public import VersionResponse
20
+
21
+
22
class BaseClient:
    """Shared HTTP plumbing for control-plane API clients.

    Owns two ``httpx.AsyncClient`` instances bound to the same base URL,
    headers, and auth: ``client`` for ordinary requests and
    ``hookless_client`` for streaming endpoints.
    """

    def __init__(
        self, base_url: str, api_key: str | None = None, auth: httpx.Auth | None = None
    ) -> None:
        self.base_url = base_url.rstrip("/")
        self.api_key = api_key

        # Attach a bearer token only when an API key was supplied.
        default_headers: dict[str, str] = (
            {"Authorization": f"Bearer {api_key}"} if api_key else {}
        )

        self.client = httpx.AsyncClient(
            base_url=self.base_url, headers=default_headers, auth=auth
        )
        self.hookless_client = httpx.AsyncClient(
            base_url=self.base_url, headers=default_headers, auth=auth
        )

    async def aclose(self) -> None:
        """Close both underlying HTTP clients."""
        await self.client.aclose()
        await self.hookless_client.aclose()
45
+
46
+
47
class ControlPlaneClient(BaseClient):
    """Unscoped client for non-project endpoints."""

    @classmethod
    @asynccontextmanager
    async def ctx(
        cls, base_url: str, api_key: str | None = None, auth: httpx.Auth | None = None
    ) -> AsyncIterator[ControlPlaneClient]:
        """Async context manager that yields a client and closes it on exit."""
        instance = cls(base_url, api_key, auth)
        try:
            yield instance
        finally:
            # Best-effort close: shutdown errors are deliberately swallowed.
            try:
                await instance.aclose()
            except Exception:
                pass

    def __init__(
        self, base_url: str, api_key: str | None = None, auth: httpx.Auth | None = None
    ) -> None:
        super().__init__(base_url, api_key, auth)

    async def server_version(self) -> VersionResponse:
        """Fetch the control plane's public version information."""
        resp = await self.client.get("/api/v1beta1/deployments-public/version")
        resp.raise_for_status()
        return VersionResponse.model_validate(resp.json())

    async def list_projects(self) -> List[ProjectSummary]:
        """List all projects visible to the caller."""
        resp = await self.client.get("/api/v1beta1/deployments/list-projects")
        resp.raise_for_status()
        return list(ProjectsListResponse.model_validate(resp.json()).projects)
79
+
80
+
81
class ProjectClient(BaseClient):
    """Project-scoped client for deployment operations.

    Every request carries ``project_id`` as a query parameter.
    """

    @classmethod
    @asynccontextmanager
    async def ctx(
        cls,
        base_url: str,
        project_id: str,
        api_key: str | None = None,
        auth: httpx.Auth | None = None,
    ) -> AsyncIterator[ProjectClient]:
        """Async context manager that yields a client and closes it on exit."""
        client = cls(base_url, project_id, api_key, auth)
        try:
            yield client
        finally:
            # Best-effort close; shutdown errors are deliberately swallowed.
            try:
                await client.aclose()
            except Exception:
                pass

    def __init__(
        self,
        base_url: str,
        project_id: str,
        api_key: str | None = None,
        auth: httpx.Auth | None = None,
    ) -> None:
        super().__init__(base_url, api_key, auth)
        # Project scope applied to every request made by this client.
        self.project_id = project_id

    async def list_deployments(self) -> List[DeploymentResponse]:
        """List all deployments in this project."""
        response = await self.client.get(
            "/api/v1beta1/deployments",
            params={"project_id": self.project_id},
        )
        response.raise_for_status()
        deployments_response = DeploymentsListResponse.model_validate(response.json())
        return [deployment for deployment in deployments_response.deployments]

    async def get_deployment(
        self, deployment_id: str, include_events: bool = False
    ) -> DeploymentResponse:
        """Fetch a single deployment, optionally including its events."""
        response = await self.client.get(
            f"/api/v1beta1/deployments/{deployment_id}",
            params={"project_id": self.project_id, "include_events": include_events},
        )
        response.raise_for_status()
        return DeploymentResponse.model_validate(response.json())

    async def create_deployment(
        self, deployment_data: DeploymentCreate
    ) -> DeploymentResponse:
        """Create a new deployment from ``deployment_data``."""
        response = await self.client.post(
            "/api/v1beta1/deployments",
            params={"project_id": self.project_id},
            # exclude_none: omit unset optional fields from the request body.
            json=deployment_data.model_dump(exclude_none=True),
        )
        response.raise_for_status()
        return DeploymentResponse.model_validate(response.json())

    async def delete_deployment(self, deployment_id: str) -> None:
        """Delete a deployment. Raises on HTTP error; returns nothing."""
        response = await self.client.delete(
            f"/api/v1beta1/deployments/{deployment_id}",
            params={"project_id": self.project_id},
        )
        response.raise_for_status()

    async def update_deployment(
        self,
        deployment_id: str,
        update_data: DeploymentUpdate,
    ) -> DeploymentResponse:
        """Patch a deployment with ``update_data``.

        NOTE(review): unlike ``create_deployment`` this sends a full
        ``model_dump()`` (including ``None`` fields) — confirm the server
        treats ``null`` values as "no change" for PATCH.
        """
        response = await self.client.patch(
            f"/api/v1beta1/deployments/{deployment_id}",
            params={"project_id": self.project_id},
            json=update_data.model_dump(),
        )
        response.raise_for_status()
        return DeploymentResponse.model_validate(response.json())

    async def validate_repository(
        self,
        repo_url: str,
        deployment_id: str | None = None,
        pat: str | None = None,
    ) -> RepositoryValidationResponse:
        """Ask the control plane to validate access to a git repository.

        Args:
            repo_url: repository URL to validate.
            deployment_id: optional existing deployment to validate against.
            pat: optional personal access token for private repositories.
        """
        response = await self.client.post(
            "/api/v1beta1/deployments/validate-repository",
            params={"project_id": self.project_id},
            json=RepositoryValidationRequest(
                repository_url=repo_url,
                deployment_id=deployment_id,
                pat=pat,
            ).model_dump(),
        )
        response.raise_for_status()
        return RepositoryValidationResponse.model_validate(response.json())

    async def stream_deployment_logs(
        self,
        deployment_id: str,
        *,
        include_init_containers: bool = False,
        since_seconds: int | None = None,
        tail_lines: int | None = None,
    ) -> AsyncIterator[LogEvent]:
        """Stream logs as LogEvent items from the control plane using SSE.

        Yields `LogEvent` models until the stream ends (e.g., rollout completes).
        """
        params: dict[str, object] = {
            "project_id": self.project_id,
            "include_init_containers": include_init_containers,
        }
        # Only forward optional filters when explicitly set.
        if since_seconds is not None:
            params["since_seconds"] = since_seconds
        if tail_lines is not None:
            params["tail_lines"] = tail_lines

        url = f"/api/v1beta1/deployments/{deployment_id}/logs"
        headers = {"Accept": "text/event-stream"}

        # hookless_client + timeout=None: long-lived stream with no
        # request hooks and no read deadline.
        async with self.hookless_client.stream(
            "GET", url, params=params, headers=headers, timeout=None
        ) as response:
            response.raise_for_status()

            # Minimal SSE parser: accumulate "event:"/"data:" lines until a
            # blank line terminates the event.
            event_name: str | None = None
            data_lines: list[str] = []
            async for line in response.aiter_lines():
                # Defensive: aiter_lines yields str, but guard None/bytes anyway.
                if line is None:
                    continue
                line = line.decode() if isinstance(line, (bytes, bytearray)) else line
                if line.startswith("event:"):
                    event_name = line[len("event:") :].strip()
                elif line.startswith("data:"):
                    # Multi-line data fields are joined with "\n" per the SSE spec.
                    data_lines.append(line[len("data:") :].lstrip())
                elif line.strip() == "":
                    # Blank line = end of event; only "log" events are emitted.
                    if event_name == "log" and data_lines:
                        data_str = "\n".join(data_lines)
                        try:
                            yield LogEvent.model_validate_json(data_str)
                        except Exception:
                            # Malformed payloads are skipped rather than
                            # aborting the stream.
                            pass
                    event_name = None
                    data_lines = []
228
+
229
+
230
# Zero-argument teardown callback type — presumably used by callers to release
# resources; no usage is visible in this module (TODO confirm).
Closer = Callable[[], None]
@@ -1 +1 @@
1
- DEFAULT_DEPLOYMENT_FILE_PATH = "llama_deploy.yaml"
1
+ # Default config location is now the current directory rather than a specific
+ # yaml file; config discovery accepts either a file or a directory path.
+ DEFAULT_DEPLOYMENT_FILE_PATH = "."
@@ -0,0 +1,415 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ import tomllib
5
+ from pathlib import Path
6
+ from typing import Any, TypeVar
7
+
8
+ import yaml
9
+ from llama_deploy.core.git.git_util import get_git_root, is_git_repo
10
+ from llama_deploy.core.path_util import validate_path_traversal
11
+ from pydantic import BaseModel, ConfigDict, Field, ValidationError, model_validator
12
+
13
# Fallback deployment name; also used as the "unset" sentinel by
# DeploymentConfig.merge_config.
DEFAULT_DEPLOYMENT_NAME = "default"
14
+
15
+
16
def read_deployment_config_from_git_root_or_cwd(
    cwd: Path, config_path: Path
) -> "DeploymentConfig":
    """Read the deployment config, anchored at the git root when inside a repo.

    Outside a git repository the config resolves relative to ``cwd``; inside
    one, ``cwd`` is re-expressed relative to the repository root so the root
    becomes the source root for config resolution.
    """
    if not is_git_repo():
        return read_deployment_config(cwd, config_path)
    root = get_git_root()
    return read_deployment_config(root, cwd.relative_to(root) / config_path)
27
+
28
+
29
def read_deployment_config(source_root: Path, config_path: Path) -> "DeploymentConfig":
    """
    Read the deployment config from the config directory.

    - first checks for a llama_deploy.toml in the config_path
    - then checks for a [tool.llamadeploy] config in the pyproject.toml
    - then check for a legacy yaml config (if config_path is a file, uses that, otherwise uses the config_path/llama_deploy.yaml)
    - based on what was resolved here, discovers the package.json, if any ui, and resolves its values from the package.json

    Args:
        source_root: path to the root of the source code. References should not exit this directory.
        config_path: path to a deployment config file, or directory containing a deployment config file.

    Returns:
        DeploymentConfig: the deployment config

    Raises:
        ValidationError: if a discovered TOML config fails model validation.
    """
    # If config_path points at a file, remember its name (unless it is one of
    # the well-known TOML files, which are discovered by name below) and
    # continue with its parent directory.
    config_file: Path | None = None
    if (source_root / config_path).is_file():
        config_file = Path(config_path.name)
        if str(config_file) in {"llama_deploy.toml", "pyproject.toml"}:
            config_file = None
        config_path = config_path.parent
    local_toml_path = source_root / config_path / "llama_deploy.toml"
    pyproject_path = source_root / config_path / "pyproject.toml"
    toml_config: DeploymentConfig = DeploymentConfig()
    # local TOML format
    if local_toml_path.exists():
        with open(local_toml_path, "rb") as toml_file:
            toml_data = tomllib.load(toml_file)
        if isinstance(toml_data, dict):
            toml_config = DeploymentConfig.model_validate(toml_data)
    # pyproject.toml format
    elif pyproject_path.exists():
        with open(pyproject_path, "rb") as pyproject_file:
            pyproject = tomllib.load(pyproject_file)
        tool = pyproject.get("tool", {})
        # Use [project].name as the deployment name fallback when
        # [tool.llamadeploy] does not define one.
        project_name: str | None = None
        project_metadata = pyproject.get("project", {})
        if isinstance(project_metadata, dict):
            name = project_metadata.get("name")
            if isinstance(name, str):
                project_name = name
        if isinstance(tool, dict):
            llama_deploy = tool.get("llamadeploy", {})
            if isinstance(llama_deploy, dict):
                if "name" not in llama_deploy:
                    # NOTE(review): if project_name is None this sets
                    # name=None explicitly, which will fail validation of the
                    # str-typed field — confirm intended.
                    llama_deploy["name"] = project_name
                toml_config = DeploymentConfig.model_validate(llama_deploy)
    # legacy yaml format, (and why not support yaml in the new format too, since this is doing everything all the ways)
    if toml_config.has_no_workflows():
        yaml_path = (
            source_root / config_path / (config_file or Path("llama_deploy.yaml"))
        )
        if yaml_path.exists():
            with open(yaml_path, "r", encoding="utf-8") as yaml_file:
                yaml_loaded = yaml.safe_load(yaml_file) or {}

            # Try both schemas; prefer the new one unless only the legacy
            # config parses AND is valid.
            old_config: DeploymentConfig | None = None
            new_config: DeploymentConfig | None = None
            try:
                old_config = DeprecatedDeploymentConfig.model_validate(
                    yaml_loaded
                ).to_deployment_config()
            except ValidationError:
                pass
            try:
                new_config = DeploymentConfig.model_validate(yaml_loaded)
            except ValidationError:
                pass
            loaded: DeploymentConfig | None = new_config
            if (
                old_config is not None
                and old_config.is_valid()
                and (new_config is None or not new_config.is_valid())
            ):
                loaded = old_config
            if loaded is not None:
                toml_config = toml_config.merge_config(loaded)

    # package.json format
    if toml_config.ui is not None:
        package_json_path = (
            source_root / config_path / toml_config.ui.directory / "package.json"
        )
        if package_json_path.exists():
            with open(package_json_path, "r", encoding="utf-8") as package_json_file:
                package_json = json.load(package_json_file)
            if isinstance(package_json, dict):
                # Standard packageManager fallback, e.g. "pnpm@9.0.0" -> "pnpm"
                pkg_manager_value = package_json.get("packageManager")
                pkg_manager_name: str | None = None
                if isinstance(pkg_manager_value, str) and pkg_manager_value:
                    pkg_manager_name = pkg_manager_value.split("@", 1)[0] or None

                llama_deploy = package_json.get("llamadeploy", {})

                if isinstance(llama_deploy, dict):
                    # Prepare payload without leaking Path objects into Pydantic
                    ui_dir = toml_config.ui.directory if toml_config.ui else None
                    ui_payload: dict[str, object] = {**llama_deploy}
                    if "directory" not in ui_payload and ui_dir is not None:
                        ui_payload["directory"] = ui_dir
                    if (
                        "package_manager" not in ui_payload
                        and pkg_manager_name is not None
                    ):
                        ui_payload["package_manager"] = pkg_manager_name

                    ui_config = UIConfig.model_validate(ui_payload)
                    # build_output_dir in package.json is relative to the UI
                    # directory; re-root it relative to the config directory.
                    if ui_config.build_output_dir is not None:
                        ui_config.build_output_dir = str(
                            Path(toml_config.ui.directory) / ui_config.build_output_dir
                        )
                    toml_config.ui = ui_config.merge_config(toml_config.ui)

    # Reject UI paths that escape source_root (e.g. "../..").
    if toml_config.ui is not None:
        validate_path_traversal(
            config_path / toml_config.ui.directory, source_root, "ui_source"
        )
        if toml_config.ui.build_output_dir:
            validate_path_traversal(
                config_path / toml_config.ui.build_output_dir,
                source_root,
                "ui_build_output_dir",
            )

    return toml_config
156
+
157
+
158
def resolve_config_parent(root: Path, deployment_path: Path) -> Path:
    """Return the directory containing the deployment config.

    When ``root / deployment_path`` is an existing file its parent directory
    is returned; otherwise the joined path itself is returned unchanged.
    """
    candidate = root / deployment_path
    return candidate.parent if candidate.is_file() else candidate
164
+
165
+
166
# Field defaults for UIConfig; also the sentinels UIConfig.merge_config uses
# to detect "not explicitly set".
DEFAULT_UI_PACKAGE_MANAGER = "npm"
DEFAULT_UI_BUILD_COMMAND = "build"
DEFAULT_UI_SERVE_COMMAND = "dev"
DEFAULT_UI_PROXY_PORT = 4502
170
+
171
+
172
class DeploymentConfig(BaseModel):
    """Normalized deployment configuration merged from TOML/YAML/package.json sources."""

    name: str = Field(
        default=DEFAULT_DEPLOYMENT_NAME,
        description="The url safe path name of the deployment.",
    )
    llama_cloud: bool = Field(
        default=False,
        description="If true, serving locally expects Llama Cloud access and will inject credentials when possible.",
    )
    app: str | None = Field(
        None,
        description="A full bundle of all workflows as an 'app'. \"path.to_import:app_name\"",
    )
    workflows: dict[str, str] = Field(
        default_factory=dict,
        description='Deprecated: A map of workflow names to their import paths. "nice_name": "path.to_import:workflow_name"',
    )
    env_files: list[str] = Field(
        default_factory=list,
        description="The environment files to load. Defaults to ['.env']",
    )
    env: dict[str, str] = Field(
        default_factory=dict,
        description="Arbitrary environment variables to set. Defaults to {}",
    )
    ui: UIConfig | None = Field(
        None,
        description="The UI configuration.",
    )

    def merge_config(self, config: "DeploymentConfig") -> "DeploymentConfig":
        """Merge the config with another config.

        ``self`` takes precedence; ``config`` fills in values wherever
        ``self`` still holds the field default.
        """

        return DeploymentConfig(
            # Fix: use the shared constant instead of repeating the literal
            # "default", so the sentinel stays in sync with the field default.
            name=_pick_non_default(self.name, config.name, DEFAULT_DEPLOYMENT_NAME),
            llama_cloud=self.llama_cloud or config.llama_cloud,
            app=self.app or config.app,
            workflows={**self.workflows, **config.workflows},
            # De-duplicate while preserving first-seen order; the previous
            # list(set(...)) produced a nondeterministic ordering.
            env_files=list(dict.fromkeys(self.env_files + config.env_files)),
            env={**self.env, **config.env},
            ui=self.ui.merge_config(config.ui)
            if config.ui is not None and self.ui is not None
            else self.ui or config.ui,
        )

    def has_no_workflows(self) -> bool:
        """Check if the config has no workflows (neither ``workflows`` nor ``app``)."""
        return len(self.workflows) == 0 and self.app is None

    def has_both_app_and_workflows(self) -> bool:
        """Check if the config has both app and workflows."""
        return self.app is not None and len(self.workflows) > 0

    def is_valid(self) -> bool:
        """Check if the config is valid (see ``validate``)."""
        try:
            self.validate()
            return True
        except ValueError:
            return False

    # NOTE(review): this instance method shadows pydantic's (deprecated)
    # BaseModel.validate classmethod — confirm no caller relies on the latter.
    def validate(self) -> None:
        """Validate the config.

        Raises:
            ValueError: if no workflows/app are configured, or both are.
        """
        if self.has_no_workflows():
            raise ValueError("Config must have at least one workflow.")
        if self.has_both_app_and_workflows():
            raise ValueError("Config cannot have both app and workflows configured.")

    def build_output_path(self) -> Path | None:
        """get the build output path, or default to the ui directory/dist"""
        if self.ui is None:
            return None
        return (
            Path(self.ui.build_output_dir)
            if self.ui.build_output_dir
            else Path(self.ui.directory) / "dist"
        )
249
+
250
+
251
+ T = TypeVar("T")
252
+
253
+
254
+ def _pick_non_default(a: T, b: T, default: T) -> T:
255
+ if a != default:
256
+ return a
257
+ return b or default
258
+
259
+
260
class UIConfig(BaseModel):
    """Configuration for an optional frontend UI bundled with a deployment."""

    directory: str = Field(
        ...,
        description="The directory containing the UI, relative to the pyproject.toml directory",
    )
    build_output_dir: str | None = Field(
        None,
        description="The directory containing the built UI, relative to the pyproject.toml directory. Defaults to 'dist' relative to the ui_directory, if defined",
    )
    package_manager: str = Field(
        DEFAULT_UI_PACKAGE_MANAGER,
        description=f"The package manager to use to build the UI. Defaults to '{DEFAULT_UI_PACKAGE_MANAGER}'",
    )
    build_command: str = Field(
        DEFAULT_UI_BUILD_COMMAND,
        description=f"The npm script command to build the UI. Defaults to '{DEFAULT_UI_BUILD_COMMAND}' if not specified",
    )
    serve_command: str = Field(
        DEFAULT_UI_SERVE_COMMAND,
        description=f"The command to serve the UI. Defaults to '{DEFAULT_UI_SERVE_COMMAND}' if not specified",
    )
    proxy_port: int = Field(
        DEFAULT_UI_PROXY_PORT,
        description=f"The port to proxy the UI to. Defaults to '{DEFAULT_UI_PROXY_PORT}' if not specified",
    )

    def merge_config(self, config: "UIConfig") -> "UIConfig":
        """Merge the config with the default config.

        ``self`` takes precedence: ``directory`` always comes from ``self``,
        and the remaining fields fall back to ``config`` only where ``self``
        still holds the field default (see ``_pick_non_default``).
        """

        return UIConfig(
            directory=self.directory,
            build_output_dir=self.build_output_dir or config.build_output_dir,
            package_manager=_pick_non_default(
                self.package_manager, config.package_manager, DEFAULT_UI_PACKAGE_MANAGER
            ),
            build_command=_pick_non_default(
                self.build_command, config.build_command, DEFAULT_UI_BUILD_COMMAND
            ),
            serve_command=_pick_non_default(
                self.serve_command, config.serve_command, DEFAULT_UI_SERVE_COMMAND
            ),
            proxy_port=_pick_non_default(
                self.proxy_port, config.proxy_port, DEFAULT_UI_PROXY_PORT
            ),
        )
305
+
306
+
307
class ServiceSourceV0(BaseModel):
    """Configuration for where to load the workflow or other source. Path is relative to the config file its declared within."""

    location: str

    @model_validator(mode="before")
    @classmethod
    def validate_fields(cls, data: Any) -> Any:
        """Accept the legacy ``name`` key as an alias for ``location``."""
        if isinstance(data, dict) and "name" in data:
            data["location"] = data.pop("name")
        return data
319
+
320
+
321
class DerecatedService(BaseModel):
    """Configuration for a single service.

    NOTE(review): the class name misspells "Deprecated"; it is referenced by
    ``DeprecatedDeploymentConfig`` below, so renaming would be a breaking change.
    """

    # Optional location of the service source (legacy "name" key supported).
    source: ServiceSourceV0 | None = Field(None)
    # "module.path:attribute" import target; may carry a legacy file prefix.
    import_path: str | None = Field(None)
    env: dict[str, str] | None = Field(None)
    env_files: list[str] | None = Field(None)
    python_dependencies: list[str] | None = Field(None)

    @model_validator(mode="before")
    @classmethod
    def validate_fields(cls, data: Any) -> Any:
        """Normalize legacy YAML key aliases to canonical field names."""
        if isinstance(data, dict):
            # Handle YAML aliases
            if "path" in data:
                data["import_path"] = data.pop("path")
            if "import-path" in data:
                data["import_path"] = data.pop("import-path")
            if "env-files" in data:
                data["env_files"] = data.pop("env-files")

        return data

    def module_location(self) -> tuple[str, str]:
        """
        Parses the import path, and target, discarding legacy file path portion, if any

        "src/module.workflow:my_workflow" -> ("module.workflow", "my_workflow")

        Raises:
            ValueError: if ``import_path`` is unset, or (from tuple unpacking)
                if it does not contain exactly one ":" separator.
        """
        if self.import_path is None:
            raise ValueError("import_path is required to compute module_location")
        module_name, workflow_name = self.import_path.split(":")
        # Path(...).name strips any leading directory component (e.g. "src/"),
        # leaving only the dotted module portion.
        return Path(module_name).name, workflow_name
354
+
355
+
356
class DeprecatedDeploymentConfig(BaseModel):
    """Model definition mapping a legacy (v0) deployment config file."""

    model_config = ConfigDict(populate_by_name=True, extra="ignore")

    name: str
    default_service: str | None = Field(None)
    services: dict[str, DerecatedService]
    ui: DerecatedService | None = None

    @model_validator(mode="before")
    @classmethod
    def validate_fields(cls, data: Any) -> Any:
        """Normalize YAML aliases (kebab-case keys) before validation."""
        # Handle YAML aliases
        if isinstance(data, dict):
            if "default-service" in data:
                data["default_service"] = data.pop("default-service")

        return data

    @classmethod
    def from_yaml(
        cls,
        path: Path,
    ) -> "DeprecatedDeploymentConfig":
        """Read config data from a yaml file."""
        with open(path, "r", encoding="utf-8") as yaml_file:
            config = yaml.safe_load(yaml_file) or {}

        return cls.model_validate(config)

    def to_deployment_config(self) -> DeploymentConfig:
        """Convert this legacy config into the modern ``DeploymentConfig``.

        Collects per-service import paths into ``workflows``, unions env files
        and env vars across services, and maps the ui service's source
        location to a ``UIConfig`` directory.

        Raises:
            KeyError: if ``default_service`` names a service without an
                import path (preserved legacy behavior).
        """
        workflows: dict[str, str] = {}
        env_files: list[str] = []
        env: dict[str, str] = {}
        ui_directory: str | None = None
        for service_name, service in self.services.items():
            if service.import_path:
                path, name = service.module_location()
                workflows[service_name] = f"{path}:{name}"
            if service.env_files:
                env_files.extend(service.env_files)
            if service.env:
                env.update(service.env)
        if self.default_service:
            workflows["default"] = workflows[self.default_service]
        env_files = list(set(env_files))

        # Bug fix: `source` is Optional on DerecatedService — guard it before
        # dereferencing. Previously a ui entry without a source crashed with
        # AttributeError; now it simply yields no UI config.
        if self.ui and self.ui.source:
            ui_directory = self.ui.source.location

        return DeploymentConfig(
            name=self.name,
            workflows=workflows,
            env_files=env_files,
            env=env,
            ui=UIConfig(directory=ui_directory) if ui_directory else None,
        )