llama-deploy-core 0.2.7a1__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,11 +3,11 @@ Git utilities for the purpose of exploring, cloning, and parsing llama-deploy re
  Responsibilities are lower level git access, as well as some application specific config parsing.
  """

- from dataclasses import dataclass
  import re
  import subprocess
- from pathlib import Path
  import tempfile
+ from dataclasses import dataclass
+ from pathlib import Path

  import yaml

@@ -46,20 +46,42 @@ def parse_github_repo_url(repo_url: str) -> tuple[str, str]:
  def inject_basic_auth(url: str, basic_auth: str | None = None) -> str:
      """Inject basic auth into a URL if provided"""
      if basic_auth and "://" in url and "@" not in url:
-         url = url.replace("https://", f"https://{basic_auth}@")
+         scheme, rest = url.split("://", 1)
+         url = f"{scheme}://{basic_auth}@{rest}"
      return url


- def _run_process(args: list[str], cwd: str | None = None) -> str:
-     """Run a process and raise an exception if it fails"""
-     result = subprocess.run(
-         args, cwd=cwd, capture_output=True, text=True, check=True, timeout=30
-     )
+ def _run_process(args: list[str], cwd: str | None = None, timeout: int = 30) -> str:
+     """Run a process and raise a GitAccessError with detailed output if it fails.
+
+     The error message includes the command, return code, working directory,
+     and both stdout and stderr to aid debugging (e.g., git fetch failures).
+     """
+     try:
+         result = subprocess.run(
+             args, cwd=cwd, capture_output=True, text=True, check=False, timeout=timeout
+         )
+     except subprocess.TimeoutExpired:
+         cmd = " ".join(args)
+         where = f" (cwd={cwd})" if cwd else ""
+         raise GitAccessError(f"Command timed out after {timeout}s: {cmd}{where}")
+
      if result.returncode != 0:
-         raise subprocess.CalledProcessError(
-             result.returncode, args, result.stdout, result.stderr
+         cmd = " ".join(args)
+         where = f" (cwd={cwd})" if cwd else ""
+         stdout = (result.stdout or "").strip()
+         stderr = (result.stderr or "").strip()
+         details = []
+         if stdout:
+             details.append(f"stdout:\n{stdout}")
+         if stderr:
+             details.append(f"stderr:\n{stderr}")
+         detail_block = "\n\n".join(details) if details else "(no output)"
+         raise GitAccessError(
+             f"Command failed with exit code {result.returncode}: {cmd}{where}\n{detail_block}"
          )
-     return result.stdout.strip()
+
+     return (result.stdout or "").strip()


  class GitAccessError(Exception):
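For callers, the practical change in 0.3.0 is that subprocess failures now surface as GitAccessError with the command, exit code, working directory, and captured output embedded in the message, and that inject_basic_auth splits on the scheme instead of assuming an https URL. A minimal usage sketch, assuming the three names shown in the hunk above are in scope; the helper function and repository URL below are invented for illustration and are not part of the package:

    # Illustrative sketch only; fetch_remote_heads is a hypothetical caller.
    def fetch_remote_heads(repo_url: str, basic_auth: str | None = None) -> str:
        # Works for any scheme (ssh://, https://, ...) because the new
        # implementation splits on "://" rather than replacing "https://".
        authed_url = inject_basic_auth(repo_url, basic_auth)
        try:
            # Output is returned stripped, mirroring _run_process's contract.
            return _run_process(["git", "ls-remote", "--heads", authed_url], timeout=60)
        except GitAccessError as err:
            # The message now carries command, exit code, cwd, stdout and stderr,
            # so callers can log it directly instead of re-running the command.
            print(f"git ls-remote failed:\n{err}")
            raise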
@@ -120,6 +142,7 @@ def clone_repo(
          if resolved_branch:
              git_ref = resolved_branch
          else:
+             # Try exact tag match; if it fails, we just ignore and proceed
              try:
                  resolved_tag = _run_process(
                      ["git", "describe", "--tags", "--exact-match"],
@@ -127,34 +150,25 @@
                  )
                  if resolved_tag:
                      git_ref = resolved_tag
-             except subprocess.CalledProcessError:
+             except GitAccessError:
                  pass
          else:  # Checkout the ref
              if did_exist:
-                 try:
-                     _run_process(
-                         ["git", "fetch", "origin"], cwd=str(dest_dir.absolute())
-                     )
-                 except subprocess.CalledProcessError:
-                     raise GitAccessError("Failed to resolve git reference")
-             try:
                  _run_process(
-                     ["git", "checkout", git_ref], cwd=str(dest_dir.absolute())
+                     ["git", "fetch", "origin"], cwd=str(dest_dir.absolute())
                  )
-             except subprocess.CalledProcessError as e:
-                 # Check error message to determine if it's a network issue or ref not found
-                 if "unable to access" in str(
-                     e.stderr
-                 ) or "fatal: unable to access repository" in str(e.stderr):
-                     raise GitAccessError("Failed to resolve git reference")
-                 else:
-                     raise GitAccessError(f"Commit SHA '{git_ref}' not found")
+             _run_process(
+                 ["git", "checkout", git_ref, "--"], cwd=str(dest_dir.absolute())
+             )
          # if no ref, stay on whatever the clone gave us/current commit
          # return the resolved sha
          resolved_sha = _run_process(
              ["git", "rev-parse", "HEAD"], cwd=str(dest_dir.absolute())
          ).strip()
          return GitCloneResult(git_sha=resolved_sha, git_ref=git_ref)
+     except GitAccessError:
+         # Re-raise enriched errors from _run_process directly
+         raise
      except subprocess.TimeoutExpired:
          raise GitAccessError("Timeout while cloning repository")
@@ -191,7 +205,7 @@ def validate_deployment_file(repo_dir: Path, deployment_file_path: str) -> bool:
      return False


- async def validate_git_public_access(repository_url: str) -> bool:
+ def validate_git_public_access(repository_url: str) -> bool:
      """Check if a git repository is publicly accessible using git ls-remote."""

      try:
@@ -207,7 +221,7 @@ async def validate_git_public_access(repository_url: str) -> bool:
          return False


- async def validate_git_credential_access(repository_url: str, basic_auth: str) -> bool:
+ def validate_git_credential_access(repository_url: str, basic_auth: str) -> bool:
      """Check if a credential provides access to a git repository."""

      auth_url = inject_basic_auth(repository_url, basic_auth)
@@ -222,3 +236,101 @@ async def validate_git_credential_access(repository_url: str, basic_auth: str) -
          return result.returncode == 0
      except (subprocess.TimeoutExpired, Exception):
          return False
+
+
+ def is_git_repo() -> bool:
+     """
+     checks if the cwd is a git repo
+     """
+     try:
+         _run_process(["git", "status"])
+         return True
+     except GitAccessError:
+         return False
+
+
+ def list_remotes() -> list[str]:
+     """
+     list the remote urls for the current git repo
+     """
+     result = _run_process(["git", "remote", "-v"])
+     return [line.split()[1] for line in result.splitlines()]
+
+
+ def get_current_branch() -> str | None:
+     """
+     get the current branch for the current git repo
+     """
+     result = _run_process(["git", "branch", "--show-current"])
+     return result.strip() if result.strip() else None
+
+
+ def get_git_root() -> Path:
+     """
+     get the root of the current git repo
+     """
+     result = _run_process(["git", "rev-parse", "--show-toplevel"])
+     return Path(result.strip())
+
+
+ def working_tree_has_changes() -> bool:
+     """
+     Returns True if the working tree has uncommitted or untracked changes.
+     Safe to call; returns False if unable to determine.
+     """
+     try:
+         result = subprocess.run(
+             ["git", "status", "--porcelain"],
+             capture_output=True,
+             text=True,
+             check=False,
+             timeout=30,
+         )
+         return bool((result.stdout or "").strip())
+     except Exception:
+         return False
+
+
+ def get_unpushed_commits_count() -> int | None:
+     """
+     Returns the number of local commits ahead of the upstream.
+
+     - Returns an integer >= 0 when an upstream is configured
+     - Returns None when no upstream is configured
+     - Returns 0 if the status cannot be determined
+     """
+     try:
+         upstream = subprocess.run(
+             [
+                 "git",
+                 "rev-parse",
+                 "--abbrev-ref",
+                 "--symbolic-full-name",
+                 "@{u}",
+             ],
+             capture_output=True,
+             text=True,
+             check=False,
+             timeout=30,
+         )
+         if upstream.returncode != 0:
+             return None
+
+         ahead_behind = subprocess.run(
+             ["git", "rev-list", "--left-right", "--count", "@{u}...HEAD"],
+             capture_output=True,
+             text=True,
+             check=False,
+             timeout=30,
+         )
+         output = (ahead_behind.stdout or "").strip()
+         if not output:
+             return 0
+         parts = output.split()
+         if len(parts) >= 2:
+             # format: behind ahead
+             ahead_count = int(parts[1])
+             return ahead_count
+         return 0
+     except Exception:
+         return 0
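The appended helpers give callers an exception-tolerant view of the current working directory's repository state. A hedged sketch of how they might be combined into a pre-deploy status check; the describe_local_repo_state function below is invented for illustration, only the helpers it calls come from the hunk above:

    # Illustrative sketch only; describe_local_repo_state is hypothetical.
    def describe_local_repo_state() -> dict:
        """Collect a summary of the cwd's git state, e.g. for display before a deploy."""
        if not is_git_repo():
            return {"is_repo": False}
        unpushed = get_unpushed_commits_count()  # None means no upstream is configured
        return {
            "is_repo": True,
            "root": str(get_git_root()),
            "branch": get_current_branch(),   # None on a detached HEAD
            "remotes": list_remotes(),
            "dirty": working_tree_has_changes(),
            "unpushed_commits": unpushed,
        }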
@@ -0,0 +1,196 @@
+ """Iterator utilities for buffering, sorting, and debouncing streams."""
+
+ from __future__ import annotations
+
+ import asyncio
+ import time
+ from typing import Any, AsyncGenerator, Callable, TypeVar
+
+ from typing_extensions import Literal
+
+ T = TypeVar("T")
+
+
+ async def debounced_sorted_prefix(
+     inner: AsyncGenerator[T, None],
+     *,
+     key: Callable[[T], Any],
+     debounce_seconds: float = 0.1,
+     max_window_seconds: float = 0.1,
+ ) -> AsyncGenerator[T, None]:
+     """Yield a stream where the initial burst is sorted, then passthrough.
+
+     Behavior:
+     - Buffer early items and sort them by the provided key.
+     - Flush the buffer when either:
+       - No new item arrives for `debounce_seconds`, or
+       - `max_window_seconds` elapses from the first buffered item, or
+     - After the first flush, subsequent items are yielded passthrough, in arrival order.
+
+     This async variant uses an asyncio.Queue and a background task to pump `inner`.
+     """
+
+     buffer: list[T] = []
+     debouncer = Debouncer(debounce_seconds, max_window_seconds)
+     merged = merge_generators(inner, debouncer.aiter())
+
+     async for item in merged:
+         if item == "__COMPLETE__":
+             buffer.sort(key=key)
+             for item in buffer:
+                 yield item
+             buffer = []
+         else:
+             if debouncer.is_complete:
+                 yield item
+             else:
+                 debouncer.extend_window()
+                 buffer.append(item)
+
+
+ COMPLETE = Literal["__COMPLETE__"]
+
+
+ async def merge_generators(
+     *generators: AsyncGenerator[T, None],
+     stop_on_first_completion: bool = False,
+ ) -> AsyncGenerator[T, None]:
+     """
+     Merge multiple async iterators into a single async iterator, yielding items as
+     soon as any source produces them.
+
+     - If stop_on_first_completion is False (default), continues until all inputs are exhausted.
+     - If stop_on_first_completion is True, stops as soon as any input completes.
+     - Propagates exceptions from any input immediately.
+     """
+     if not generators:
+         return
+
+     active_generators: dict[int, AsyncGenerator[T, None]] = {
+         index: gen for index, gen in enumerate(generators)
+     }
+
+     next_item_tasks: dict[int, asyncio.Task[T]] = {}
+     exception_to_raise: BaseException | None = None
+     stopped_on_first_completion = False
+
+     # Prime one pending task per generator to maintain fairness
+     for index, gen in active_generators.items():
+         next_item_tasks[index] = asyncio.create_task(anext(gen))
+
+     try:
+         while next_item_tasks and exception_to_raise is None:
+             done, _ = await asyncio.wait(
+                 set(next_item_tasks.values()),
+                 return_when=asyncio.FIRST_COMPLETED,
+             )
+
+             for finished in done:
+                 # Locate which generator this task belonged to
+                 task_index: int | None = None
+                 for index, task in next_item_tasks.items():
+                     if task is finished:
+                         task_index = index
+                         break
+
+                 if task_index is None:
+                     # Should not happen, but continue defensively
+                     continue
+
+                 try:
+                     value = finished.result()
+                 except StopAsyncIteration:
+                     # Generator exhausted
+                     if stop_on_first_completion:
+                         stopped_on_first_completion = True
+                         # Break out of the inner loop; the outer loop will
+                         # observe the stop flag and exit to the finally block
+                         # where pending tasks are cancelled and generators closed.
+                         break
+                     else:
+                         next_item_tasks.pop(task_index, None)
+                         active_generators.pop(task_index, None)
+                         continue
+                 except Exception as exc:  # noqa: BLE001 - propagate specific generator error
+                     exception_to_raise = exc
+                     break
+                 else:
+                     # Remove the finished task before yielding
+                     next_item_tasks.pop(task_index, None)
+                     yield value
+                     # Schedule the next item fetch for this generator
+                     gen = active_generators.get(task_index)
+                     if gen is not None:
+                         next_item_tasks[task_index] = asyncio.create_task(anext(gen))
+             # If we are configured to stop on first completion and observed one,
+             # exit the outer loop to perform cleanup in the finally block.
+             if stopped_on_first_completion:
+                 break
+     finally:
+         # Ensure we do not leak tasks or open generators
+         for task in next_item_tasks.values():
+             task.cancel()
+         if next_item_tasks:
+             try:
+                 await asyncio.gather(*next_item_tasks.values(), return_exceptions=True)
+             except Exception:
+                 pass
+         for gen in active_generators.values():
+             try:
+                 await gen.aclose()
+             except Exception:
+                 pass
+
+     if exception_to_raise is not None:
+         raise exception_to_raise
+     if stopped_on_first_completion:
+         return
+
+
+ class Debouncer:
+     """
+     Continually extends a complete time while extend is called, up to a max window.
+     Exposes methods that notify on completion
+     """
+
+     def __init__(
+         self,
+         debounce_seconds: float = 0.1,
+         max_window_seconds: float = 1,
+         get_time: Callable[[], float] = time.monotonic,
+     ):
+         self.debounce_seconds = debounce_seconds
+         self.max_window_seconds = max_window_seconds
+         self.complete_signal = asyncio.Event()
+         self.get_time = get_time
+         self.start_time = self.get_time()
+         self.complete_time = self.start_time + self.debounce_seconds
+         self.max_complete_time = self.start_time + self.max_window_seconds
+         asyncio.create_task(self._loop())
+
+     async def _loop(self):
+         while not self.complete_signal.is_set():
+             now = self.get_time()
+             remaining = min(self.complete_time, self.max_complete_time) - now
+             if remaining <= 0:
+                 self.complete_signal.set()
+             else:
+                 await asyncio.sleep(remaining)
+
+     @property
+     def is_complete(self) -> bool:
+         return self.complete_signal.is_set()
+
+     def extend_window(self) -> None:
+         """Mark a new item has arrived, extending the debounce window."""
+         now = self.get_time()
+         self.complete_time = now + self.debounce_seconds
+
+     async def wait(self) -> None:
+         """Wait for the debounce window to expire, or the max window to elapse."""
+         await self.complete_signal.wait()
+
+     async def aiter(self) -> AsyncGenerator[COMPLETE, None]:
+         """Yield a stream that emits an element when the wait event occurs."""
+         await self.wait()
+         yield "__COMPLETE__"
@@ -0,0 +1,24 @@
+ from pathlib import Path
+
+
+ def validate_path_traversal(
+     path: Path, source_root: Path, path_type: str = "path"
+ ) -> None:
+     """Validates that a path is within the source root to prevent path traversal attacks.
+
+     Args:
+         path: The path to validate
+         source_root: The root directory that paths should be relative to
+         path_type: Description of the path type for error messages
+
+     Raises:
+         DeploymentError: If the path is outside the source root
+     """
+     resolved_path = (source_root / path).resolve()
+     resolved_source_root = source_root.resolve()
+
+     if not resolved_path.is_relative_to(resolved_source_root):
+         msg = (
+             f"{path_type} {path} is not a subdirectory of the source root {source_root}"
+         )
+         raise RuntimeError(msg)
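The helper's contract is easier to see with a concrete call. A short sketch assuming validate_path_traversal is importable; the directory names below are invented. Note that, as added here, the failure path raises RuntimeError even though the docstring mentions DeploymentError:

    # Illustrative sketch only; paths are invented.
    from pathlib import Path

    source_root = Path("/srv/deployments/my-app")

    # OK: resolves to /srv/deployments/my-app/ui/dist, inside the root.
    validate_path_traversal(Path("ui/dist"), source_root, path_type="static assets path")

    # Raises: "../.." escapes the source root after resolution.
    try:
        validate_path_traversal(Path("../../etc/passwd"), source_root, path_type="deployment file")
    except RuntimeError as err:
        print(err)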
@@ -2,17 +2,20 @@ from .base import Base
  from .deployments import (
      DeploymentCreate,
      DeploymentResponse,
-     DeploymentUpdate,
      DeploymentsListResponse,
+     DeploymentUpdate,
+     LlamaDeploymentPhase,
      LlamaDeploymentSpec,
+     LogEvent,
      apply_deployment_update,
-     LlamaDeploymentPhase,
  )
- from .git_validation import RepositoryValidationResponse, RepositoryValidationRequest
- from .projects import ProjectSummary, ProjectsListResponse
+ from .git_validation import RepositoryValidationRequest, RepositoryValidationResponse
+ from .projects import ProjectsListResponse, ProjectSummary
+ from .public import VersionResponse

  __all__ = [
      "Base",
+     "LogEvent",
      "DeploymentCreate",
      "DeploymentResponse",
      "DeploymentUpdate",
@@ -24,4 +27,5 @@ __all__ = [
      "RepositoryValidationRequest",
      "ProjectSummary",
      "ProjectsListResponse",
+     "VersionResponse",
  ]
@@ -1,11 +1,11 @@
- from typing import Literal
  from datetime import datetime
+ from pathlib import Path
+ from typing import Literal

  from pydantic import HttpUrl

  from .base import Base

-
  # K8s CRD phase values
  LlamaDeploymentPhase = Literal[
      "Syncing",  # Initial reconciliation phase - controller is processing the deployment
@@ -18,6 +18,15 @@ LlamaDeploymentPhase = [
  ]


+ class DeploymentEvent(Base):
+     message: str | None = None
+     reason: str | None = None
+     type: str | None = None
+     first_timestamp: datetime | None = None
+     last_timestamp: datetime | None = None
+     count: int | None = None
+
+
  class DeploymentResponse(Base):
      id: str
      name: str
@@ -30,6 +39,8 @@ class DeploymentResponse(Base):
      secret_names: list[str] | None = None
      apiserver_url: HttpUrl | None
      status: LlamaDeploymentPhase
+     warning: str | None = None
+     events: list[DeploymentEvent] | None = None


  class DeploymentsListResponse(Base):
@@ -65,11 +76,13 @@ class LlamaDeploymentSpec(Base):

      projectId: str
      repoUrl: str
-     deploymentFilePath: str = "llama_deploy.yaml"
+     deploymentFilePath: str = "."
      gitRef: str | None = None
      gitSha: str | None = None
      name: str
      secretName: str | None = None
+     # when true, the deployment will prebuild the UI assets and serve them from a static file server
+     staticAssetsPath: str | None = None


  class LlamaDeploymentStatus(Base):
@@ -108,6 +121,7 @@ class DeploymentUpdate(Base):
      git_sha: str | None = None
      personal_access_token: str | None = None
      secrets: dict[str, str | None] | None = None
+     static_assets_path: Path | None = None


  class DeploymentUpdateResult(Base):
@@ -157,6 +171,11 @@ def apply_deployment_update(
      if update.git_sha is not None:
          updated_spec.gitSha = None if update.git_sha == "" else update.git_sha

+     # always apply this, as it should be cleared out if none, and is only set by the server
+     updated_spec.staticAssetsPath = (
+         str(update.static_assets_path) if update.static_assets_path else None
+     )
+
      # Track secret changes
      secret_adds: dict[str, str] = {}
      secret_removes: list[str] = []
@@ -186,3 +205,10 @@
          secret_adds=secret_adds,
          secret_removes=secret_removes,
      )
+
+
+ class LogEvent(Base):
+     pod: str
+     container: str
+     text: str
+     timestamp: datetime
@@ -1,4 +1,5 @@
- from typing import Optional
+ from pathlib import Path
+
  from pydantic import BaseModel, Field


@@ -17,11 +18,11 @@ class RepositoryValidationResponse(BaseModel):
          default=False,
          description="True if validation succeeded via GitHub App for a deployment that previously used a PAT",
      )
-     github_app_name: Optional[str] = Field(
+     github_app_name: str | None = Field(
          default=None,
          description="Name of the GitHub App if repository is a private GitHub repo and server has GitHub App configured",
      )
-     github_app_installation_url: Optional[str] = Field(
+     github_app_installation_url: str | None = Field(
          default=None,
          description="GitHub App installation URL if repository is a private GitHub repo and server has GitHub App configured",
      )
@@ -29,8 +30,8 @@

  class RepositoryValidationRequest(BaseModel):
      repository_url: str
-     deployment_id: Optional[str] = None
-     pat: Optional[str] = None
+     deployment_id: str | None = None
+     pat: str | None = None


  class GitApplicationValidationResponse(BaseModel):
@@ -44,3 +45,7 @@ class GitApplicationValidationResponse(BaseModel):
      git_ref: str | None = None
      git_sha: str | None = None
      valid_deployment_file_path: str | None = None
+     ui_build_output_path: Path | None = Field(
+         default=None,
+         description="Path to the UI build output, if the deployment's UI has a package.json with a build script; None if no UI is configured",
+     )
@@ -1,3 +1,7 @@
+ from typing import Any
+
+ from pydantic import model_validator
+
  from .base import Base


@@ -5,8 +9,18 @@ class ProjectSummary(Base):
      """Summary of a project with deployment count"""

      project_id: str
+     project_name: str
      deployment_count: int

+     @model_validator(mode="before")
+     @classmethod
+     def set_default_project_name(cls, data: Any) -> Any:
+         if isinstance(data, dict):
+             if "project_name" not in data or data.get("project_name") is None:
+                 if "project_id" in data:
+                     data["project_name"] = data["project_id"]
+         return data
+

  class ProjectsListResponse(Base):
      """Response model for listing projects with deployment counts"""
@@ -0,0 +1,7 @@
+ from .base import Base
+
+
+ class VersionResponse(Base):
+     version: str
+     requires_auth: bool = False
+     min_llamactl_version: str | None = None
@@ -0,0 +1,14 @@
+ from ._abstract_deployments_service import (
+     AbstractDeploymentsService,
+     AbstractPublicDeploymentsService,
+ )
+ from ._create_deployments_router import create_v1beta1_deployments_router
+ from ._exceptions import DeploymentNotFoundError, ReplicaSetNotFoundError
+
+ __all__ = [
+     "AbstractDeploymentsService",
+     "AbstractPublicDeploymentsService",
+     "create_v1beta1_deployments_router",
+     "DeploymentNotFoundError",
+     "ReplicaSetNotFoundError",
+ ]