lightning-sdk 0.2.0__py3-none-any.whl → 0.2.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lightning_sdk/__init__.py +1 -1
- lightning_sdk/api/lit_container_api.py +16 -0
- lightning_sdk/cli/docker.py +3 -58
- lightning_sdk/cli/download.py +28 -24
- lightning_sdk/cli/serve.py +17 -53
- lightning_sdk/lightning_cloud/openapi/__init__.py +2 -0
- lightning_sdk/lightning_cloud/openapi/api/pipelines_service_api.py +5 -1
- lightning_sdk/lightning_cloud/openapi/models/__init__.py +2 -0
- lightning_sdk/lightning_cloud/openapi/models/pipelines_id_body.py +41 -15
- lightning_sdk/lightning_cloud/openapi/models/project_id_pipelines_body.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_filesystem_job.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_filesystem_mmt.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_job_artifacts_type.py +103 -0
- lightning_sdk/lightning_cloud/openapi/models/v1_organization.py +53 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_pipeline.py +41 -15
- lightning_sdk/lightning_cloud/openapi/models/v1_pipeline_schedule.py +149 -0
- lightning_sdk/serve.py +134 -0
- {lightning_sdk-0.2.0.dist-info → lightning_sdk-0.2.2.dist-info}/METADATA +1 -1
- {lightning_sdk-0.2.0.dist-info → lightning_sdk-0.2.2.dist-info}/RECORD +23 -20
- {lightning_sdk-0.2.0.dist-info → lightning_sdk-0.2.2.dist-info}/LICENSE +0 -0
- {lightning_sdk-0.2.0.dist-info → lightning_sdk-0.2.2.dist-info}/WHEEL +0 -0
- {lightning_sdk-0.2.0.dist-info → lightning_sdk-0.2.2.dist-info}/entry_points.txt +0 -0
- {lightning_sdk-0.2.0.dist-info → lightning_sdk-0.2.2.dist-info}/top_level.txt +0 -0
lightning_sdk/__init__.py
CHANGED

lightning_sdk/api/lit_container_api.py
CHANGED
@@ -1,3 +1,4 @@
+import inspect
import time
from typing import Any, Callable, Dict, Generator, Iterator, List

@@ -21,6 +22,16 @@ class DockerPushError(Exception):


def retry_on_lcr_auth_failure(func: Callable) -> Callable:
+    def generator_wrapper(self: "LitContainerApi", *args: Any, **kwargs: Any) -> Callable:
+        try:
+            gen = func(self, *args, **kwargs)
+            yield from gen
+        except LCRAuthFailedError:
+            self.authenticate(reauth=True)
+            gen = func(self, *args, **kwargs)
+            yield from gen
+        return
+
    def wrapper(self: "LitContainerApi", *args: Any, **kwargs: Any) -> Callable:
        try:
            return func(self, *args, **kwargs)
@@ -28,6 +39,9 @@ def retry_on_lcr_auth_failure(func: Callable) -> Callable:
            self.authenticate(reauth=True)
            return func(self, *args, **kwargs)

+    if inspect.isgeneratorfunction(func):
+        return generator_wrapper
+
    return wrapper


@@ -45,6 +59,7 @@ class LitContainerApi:
            ) from None

    def authenticate(self, reauth: bool = False) -> bool:
+        resp = None
        try:
            authed_user = self._client.auth_service_get_user()
            username = authed_user.username
@@ -80,6 +95,7 @@ class LitContainerApi:
        except Exception as e:
            raise ValueError(f"Could not delete container {container} from project {project_id}: {e!s}") from e

+    @retry_on_lcr_auth_failure
    def upload_container(self, container: str, teamspace: Teamspace, tag: str) -> Generator[dict, None, None]:
        try:
            self._docker_client.images.get(container)
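The generator branch added here matters because `upload_container` is a generator: with only the plain `wrapper`, calling the decorated method would merely create the generator object, so an `LCRAuthFailedError` raised while streaming would never reach the `except` clause. A minimal sketch of the same dispatch pattern, outside the SDK (the retried exception type and function names below are illustrative, not from the package):

import inspect
from typing import Any, Callable


def retry_once(func: Callable) -> Callable:
    """Illustrative retry decorator that handles both plain and generator functions."""

    def generator_wrapper(*args: Any, **kwargs: Any):
        try:
            yield from func(*args, **kwargs)  # failures only surface during iteration
        except RuntimeError:
            yield from func(*args, **kwargs)  # restart the whole stream once

    def wrapper(*args: Any, **kwargs: Any):
        try:
            return func(*args, **kwargs)
        except RuntimeError:
            return func(*args, **kwargs)

    # Same check as the SDK change: generator functions need the yielding wrapper,
    # otherwise the except clause in `wrapper` is unreachable for them.
    return generator_wrapper if inspect.isgeneratorfunction(func) else wrapper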
lightning_sdk/cli/docker.py
CHANGED
@@ -1,9 +1,6 @@
-import os
-import warnings
-from pathlib import Path
-
import click
-
+
+from lightning_sdk.serve import _LitServeDeployer


@click.group(name="dockerize")
@@ -22,56 +19,4 @@ def api(server_filename: str, port: int = 8000, gpu: bool = False, tag: str = "l


def _api(server_filename: str, port: int = 8000, gpu: bool = False, tag: str = "litserve-model") -> str:
-
-
-    if os.path.exists("Dockerfile"):
-        console.print("Dockerfile already exists. Skipping generation.")
-        return os.path.abspath("Dockerfile")
-
-    import litserve as ls
-    from litserve import docker_builder
-
-    requirements = ""
-    if os.path.exists("requirements.txt"):
-        requirements = "-r requirements.txt"
-    else:
-        warnings.warn(
-            f"requirements.txt not found at {os.getcwd()}. "
-            f"Make sure to install the required packages in the Dockerfile.",
-            UserWarning,
-        )
-
-    current_dir = Path.cwd()
-    if not (current_dir / server_filename).is_file():
-        raise FileNotFoundError(f"Server file `{server_filename}` must be in the current directory: {os.getcwd()}")
-
-    version = ls.__version__
-    if gpu:
-        run_cmd = f"docker run --gpus all -p {port}:{port} {tag}:latest"
-        docker_template = docker_builder.CUDA_DOCKER_TEMPLATE
-    else:
-        run_cmd = f"docker run -p {port}:{port} {tag}:latest"
-        docker_template = docker_builder.DOCKERFILE_TEMPLATE
-    dockerfile_content = docker_template.format(
-        server_filename=server_filename,
-        port=port,
-        version=version,
-        requirements=requirements,
-    )
-    with open("Dockerfile", "w") as f:
-        f.write(dockerfile_content)
-
-    success_msg = f"""[bold]Dockerfile created successfully[/bold]
-Update [underline]{os.path.abspath("Dockerfile")}[/underline] to add any additional dependencies or commands.
-
-[bold]Build the container with:[/bold]
-> [underline]docker build -t {tag} .[/underline]
-
-[bold]To run the Docker container on the machine:[/bold]
-> [underline]{run_cmd}[/underline]
-
-[bold]To push the container to a registry:[/bold]
-> [underline]docker push {tag}[/underline]
-"""
-    console.print(success_msg)
-    return os.path.abspath("Dockerfile")
+    return _LitServeDeployer().dockerize_api(server_filename=server_filename, port=port, gpu=gpu, tag=tag)
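The CLI no longer builds the Dockerfile itself; the logic moved into the new lightning_sdk/serve.py module and `_api` is now a thin delegate. A hedged usage sketch of that delegate (it assumes a `server.py` and, ideally, a `requirements.txt` in the current directory, as the old implementation required):

from lightning_sdk.serve import _LitServeDeployer

# Generates a Dockerfile for the given LitServe script and returns its absolute
# path; gpu=True selects the CUDA template instead of the CPU one.
dockerfile_path = _LitServeDeployer().dockerize_api("server.py", port=8000, gpu=False, tag="litserve-model")
print(dockerfile_path)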
lightning_sdk/cli/download.py
CHANGED
@@ -41,15 +41,7 @@ def model(name: str, download_dir: str = ".") -> None:


@download.command(name="folder")
-@click.option(
-    "--path",
-    default="",
-    help=(
-        "The relative path within the Studio you want to download. "
-        "If you leave it empty it will download whole studio and locally creates a "
-        "new folder with the same name as the selected studio."
-    ),
-)
+@click.argument("path")
@click.option(
    "--studio",
    default=None,
@@ -62,16 +54,26 @@ def model(name: str, download_dir: str = ".") -> None:
    ),
)
@click.option(
-    "--local-path",
+    "--local-path",
+    "--local_path",
+    default=".",
+    type=click.Path(file_okay=False, dir_okay=True),
+    help="The path to the directory you want to download the folder to.",
)
def folder(path: str = "", studio: Optional[str] = None, local_path: str = ".") -> None:
-    """Download a folder from a Studio.
+    """Download a folder from a Studio.
+
+    Example:
+        lightning download folder PATH
+
+    PATH: The relative path within the Studio you want to download.
+    Defaults to the entire studio.
+    """
    local_path = Path(local_path)
    if not local_path.is_dir():
        raise NotADirectoryError(f"'{local_path}' is not a directory")

-
-    resolved_studio = menu._resolve_studio(studio)
+    resolved_studio = _resolve_studio(studio)

    if not path:
        local_path /= resolved_studio.name
@@ -89,15 +91,7 @@ def folder(path: str = "", studio: Optional[str] = None, local_path: str = ".")


@download.command(name="file")
-@click.option(
-    "--path",
-    default="",
-    help=(
-        "The relative path within the Studio you want to download. "
-        "If you leave it empty it will download whole studio and locally creates a new folder "
-        "with the same name as the selected studio."
-    ),
-)
+@click.argument("path")
@click.option(
    "--studio",
    default=None,
@@ -110,10 +104,20 @@ def folder(path: str = "", studio: Optional[str] = None, local_path: str = ".")
    ),
)
@click.option(
-    "--local-path",
+    "--local-path",
+    "--local_path",
+    default=".",
+    type=click.Path(file_okay=False, dir_okay=True),
+    help="The path to the directory you want to download the file to.",
)
def file(path: str = "", studio: Optional[str] = None, local_path: str = ".") -> None:
-    """Download a file from a Studio.
+    """Download a file from a Studio.
+
+    Example:
+        lightning download file PATH
+
+    PATH: The relative path to the file within the Studio you want to download.
+    """
    local_path = Path(local_path)
    if not local_path.is_dir():
        raise NotADirectoryError(f"'{local_path}' is not a directory")
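The net effect on the CLI is that the Studio path is now a positional argument instead of a --path option, and the destination option accepts both spellings, --local-path and --local_path. For example, following the new docstrings, an invocation would look like `lightning download folder data/checkpoints --studio my-studio --local-path ./out` or `lightning download file scripts/train.py --studio my-studio --local_path .` (the studio and path names here are placeholders).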
lightning_sdk/cli/serve.py
CHANGED
@@ -1,4 +1,3 @@
-import os
import subprocess
from pathlib import Path
from typing import Optional, Union
@@ -9,7 +8,9 @@ from rich.console import Console
from rich.progress import Progress, SpinnerColumn, TextColumn, TimeElapsedColumn
from rich.prompt import Confirm

-from lightning_sdk.
+from lightning_sdk.api.lit_container_api import LitContainerApi
+from lightning_sdk.cli.teamspace_menu import _TeamspacesMenu
+from lightning_sdk.serve import _LitServeDeployer


@click.group("serve")
@@ -84,7 +85,8 @@ def api_impl(
    if not script_path.is_file():
        raise ValueError(f"Path is not a file: {script_path}")

-
+    ls_deployer = _LitServeDeployer()
+    ls_deployer.generate_client() if easy else None

    if cloud:
        tag = repository if repository else "litserve-model"
@@ -101,30 +103,13 @@ def api_impl(
        raise RuntimeError(error_msg) from None


-def _generate_client(console: Console) -> None:
-    try:
-        from litserve.python_client import client_template
-    except ImportError:
-        raise ImportError(
-            "litserve is not installed. Please install it with `pip install lightning_sdk[serve]`"
-        ) from None
-
-    client_path = Path("client.py")
-    if client_path.exists():
-        console.print("Skipping client generation: client.py already exists", style="blue")
-    else:
-        try:
-            client_path.write_text(client_template)
-            console.print("✅ Client generated at client.py", style="bold green")
-        except OSError as e:
-            raise OSError(f"Failed to generate client.py: {e!s}") from None
-
-
def _handle_cloud(
    script_path: Union[str, Path],
    console: Console,
    gpu: bool,
-
+    repository: str = "litserve-model",
+    tag: Optional[str] = None,
+    teamspace: Optional[str] = None,
    non_interactive: bool = False,
) -> None:
    try:
@@ -133,8 +118,8 @@ def _handle_cloud(
    except docker.errors.DockerException as e:
        raise RuntimeError(f"Failed to connect to Docker daemon: {e!s}. Is Docker running?") from None

-
-
+    ls_deployer = _LitServeDeployer()
+    path = ls_deployer.dockerize_api(script_path, port=8000, gpu=gpu, tag=tag)
    console.clear()
    if non_interactive:
        console.print("[italic]non-interactive[/italic] mode enabled, skipping confirmation prompts", style="blue")
@@ -145,6 +130,11 @@ def _handle_cloud(
        console.print("Please fix the Dockerfile and try again.", style="red")
        return

+    tag = tag if tag else "latest"
+
+    lit_cr = LitContainerApi()
+    menu = _TeamspacesMenu()
+    teamspace = menu._resolve_teamspace(teamspace)
    with Progress(
        SpinnerColumn(),
        TextColumn("[progress.description]{task.description}"),
@@ -152,35 +142,9 @@ def _handle_cloud(
        console=console,
        transient=False,
    ) as progress:
-
-
-        for line in build_status:
-            if "error" in line:
-                progress.stop()
-                console.print(f"\n[red]{line}[/red]")
-                return
-            if "stream" in line and line["stream"].strip():
-                console.print(line["stream"].strip(), style="bright_black")
-                progress.update(build_task, description="Building Docker image")
-
-        progress.update(build_task, description="[green]Build completed![/green]")
-
-        push_task = progress.add_task("Pushing to registry", total=None)
-        console.print("\nPushing image...", style="bold blue")
-        push_status = client.api.push(tag, stream=True, decode=True)
-        for line in push_status:
-            if "error" in line:
-                progress.stop()
-                console.print(f"\n[red]{line}[/red]")
-                return
-            if "status" in line:
-                console.print(line["status"], style="bright_black")
-                progress.update(push_task, description="Pushing to registry")
-
-        progress.update(push_task, description="[green]Push completed![/green]")
-
+        ls_deployer._build_container(path, repository, tag, console, progress)
+        ls_deployer._push_container(repository, tag, teamspace, lit_cr, progress)
        console.print(f"\n✅ Image pushed to {tag}", style="bold green")
        console.print(
            "Soon you will be able to deploy this model to the Lightning Studio!",
        )
-        # TODO: Deploy to the cloud
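With the inline build and push loops gone, `_handle_cloud` composes a few pieces: the deployer writes the Dockerfile, a teamspace is resolved (interactively if none is passed), and the build and push run through `LitContainerApi`. A sketch of that flow using only names visible in this diff; both helpers are private, and their argument order is taken from the call sites above, so treat this as an assumption rather than a stable API:

from rich.console import Console
from rich.progress import Progress, SpinnerColumn, TextColumn, TimeElapsedColumn

from lightning_sdk.api.lit_container_api import LitContainerApi
from lightning_sdk.cli.teamspace_menu import _TeamspacesMenu
from lightning_sdk.serve import _LitServeDeployer

console = Console()
deployer = _LitServeDeployer()

# Generate the Dockerfile for the server script and keep its path for the build.
dockerfile = deployer.dockerize_api("server.py", port=8000, gpu=False, tag="latest")

teamspace = _TeamspacesMenu()._resolve_teamspace(None)  # prompts when not provided
registry = LitContainerApi()

with Progress(
    SpinnerColumn(), TextColumn("[progress.description]{task.description}"), TimeElapsedColumn(), console=console
) as progress:
    deployer._build_container(dockerfile, "litserve-model", "latest", console, progress)
    deployer._push_container("litserve-model", "latest", teamspace, registry, progress)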
lightning_sdk/lightning_cloud/openapi/__init__.py
CHANGED
@@ -509,6 +509,7 @@ from lightning_sdk.lightning_cloud.openapi.models.v1_invalidate_cloud_space_inst
from lightning_sdk.lightning_cloud.openapi.models.v1_invite_project_membership_response import V1InviteProjectMembershipResponse
from lightning_sdk.lightning_cloud.openapi.models.v1_job import V1Job
from lightning_sdk.lightning_cloud.openapi.models.v1_job_action import V1JobAction
+from lightning_sdk.lightning_cloud.openapi.models.v1_job_artifacts_type import V1JobArtifactsType
from lightning_sdk.lightning_cloud.openapi.models.v1_job_file import V1JobFile
from lightning_sdk.lightning_cloud.openapi.models.v1_job_health_check_config import V1JobHealthCheckConfig
from lightning_sdk.lightning_cloud.openapi.models.v1_job_log_entry import V1JobLogEntry
@@ -685,6 +686,7 @@ from lightning_sdk.lightning_cloud.openapi.models.v1_path_mapping import V1PathM
from lightning_sdk.lightning_cloud.openapi.models.v1_path_telemetry import V1PathTelemetry
from lightning_sdk.lightning_cloud.openapi.models.v1_phase_type import V1PhaseType
from lightning_sdk.lightning_cloud.openapi.models.v1_pipeline import V1Pipeline
+from lightning_sdk.lightning_cloud.openapi.models.v1_pipeline_schedule import V1PipelineSchedule
from lightning_sdk.lightning_cloud.openapi.models.v1_pipeline_step import V1PipelineStep
from lightning_sdk.lightning_cloud.openapi.models.v1_pipeline_step_status import V1PipelineStepStatus
from lightning_sdk.lightning_cloud.openapi.models.v1_pipeline_step_type import V1PipelineStepType
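Because both new models are re-exported here, they can be imported from the generated client's package root rather than from their module files:

from lightning_sdk.lightning_cloud.openapi import V1JobArtifactsType, V1PipelineSchedule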
lightning_sdk/lightning_cloud/openapi/api/pipelines_service_api.py
CHANGED
@@ -461,6 +461,7 @@ class PipelinesServiceApi(object):

        :param async_req bool
        :param str project_id: (required)
+        :param str parent_pipeline_id:
        :param str page_token:
        :param int limit:
        :param str state:
@@ -485,6 +486,7 @@ class PipelinesServiceApi(object):

        :param async_req bool
        :param str project_id: (required)
+        :param str parent_pipeline_id:
        :param str page_token:
        :param int limit:
        :param str state:
@@ -493,7 +495,7 @@ class PipelinesServiceApi(object):
        returns the request thread.
        """

-        all_params = ['project_id', 'page_token', 'limit', 'state'] # noqa: E501
+        all_params = ['project_id', 'parent_pipeline_id', 'page_token', 'limit', 'state'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
@@ -520,6 +522,8 @@ class PipelinesServiceApi(object):
            path_params['projectId'] = params['project_id'] # noqa: E501

        query_params = []
+        if 'parent_pipeline_id' in params:
+            query_params.append(('parentPipelineId', params['parent_pipeline_id'])) # noqa: E501
        if 'page_token' in params:
            query_params.append(('pageToken', params['page_token'])) # noqa: E501
        if 'limit' in params:
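For callers, the new parameter is just another optional keyword that is serialized as the parentPipelineId query string. A sketch, assuming the generated list call carries the usual swagger-codegen name; the method name itself is not shown in this diff and is therefore an assumption:

from lightning_sdk.lightning_cloud.openapi.api.pipelines_service_api import PipelinesServiceApi

api = PipelinesServiceApi()  # default ApiClient; real use needs configured auth

# Hypothetical method name following swagger-codegen conventions; only the
# parent_pipeline_id kwarg and its parentPipelineId serialization are shown
# by this diff.
resp = api.pipelines_service_list_pipelines("my-project-id", parent_pipeline_id="pipeline-parent-123", limit=25)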
lightning_sdk/lightning_cloud/openapi/models/__init__.py
CHANGED
@@ -471,6 +471,7 @@ from lightning_sdk.lightning_cloud.openapi.models.v1_invalidate_cloud_space_inst
from lightning_sdk.lightning_cloud.openapi.models.v1_invite_project_membership_response import V1InviteProjectMembershipResponse
from lightning_sdk.lightning_cloud.openapi.models.v1_job import V1Job
from lightning_sdk.lightning_cloud.openapi.models.v1_job_action import V1JobAction
+from lightning_sdk.lightning_cloud.openapi.models.v1_job_artifacts_type import V1JobArtifactsType
from lightning_sdk.lightning_cloud.openapi.models.v1_job_file import V1JobFile
from lightning_sdk.lightning_cloud.openapi.models.v1_job_health_check_config import V1JobHealthCheckConfig
from lightning_sdk.lightning_cloud.openapi.models.v1_job_log_entry import V1JobLogEntry
@@ -647,6 +648,7 @@ from lightning_sdk.lightning_cloud.openapi.models.v1_path_mapping import V1PathM
from lightning_sdk.lightning_cloud.openapi.models.v1_path_telemetry import V1PathTelemetry
from lightning_sdk.lightning_cloud.openapi.models.v1_phase_type import V1PhaseType
from lightning_sdk.lightning_cloud.openapi.models.v1_pipeline import V1Pipeline
+from lightning_sdk.lightning_cloud.openapi.models.v1_pipeline_schedule import V1PipelineSchedule
from lightning_sdk.lightning_cloud.openapi.models.v1_pipeline_step import V1PipelineStep
from lightning_sdk.lightning_cloud.openapi.models.v1_pipeline_step_status import V1PipelineStepStatus
from lightning_sdk.lightning_cloud.openapi.models.v1_pipeline_step_type import V1PipelineStepType
lightning_sdk/lightning_cloud/openapi/models/pipelines_id_body.py
CHANGED
@@ -47,7 +47,8 @@ class PipelinesIdBody(object):
        'error': 'str',
        'message': 'str',
        'name': 'str',
-        '
+        'parent_pipeline_id': 'str',
+        'schedules': 'list[V1PipelineSchedule]',
        'shared_filesystem': 'V1SharedFilesystem',
        'state': 'str',
        'statuses': 'list[V1PipelineStepStatus]',
@@ -63,7 +64,8 @@ class PipelinesIdBody(object):
        'error': 'error',
        'message': 'message',
        'name': 'name',
-        '
+        'parent_pipeline_id': 'parentPipelineId',
+        'schedules': 'schedules',
        'shared_filesystem': 'sharedFilesystem',
        'state': 'state',
        'statuses': 'statuses',
@@ -72,7 +74,7 @@ class PipelinesIdBody(object):
        'user_id': 'userId'
    }

-    def __init__(self, cluster_id: 'str' =None, created_at: 'datetime' =None, display_name: 'str' =None, error: 'str' =None, message: 'str' =None, name: 'str' =None,
+    def __init__(self, cluster_id: 'str' =None, created_at: 'datetime' =None, display_name: 'str' =None, error: 'str' =None, message: 'str' =None, name: 'str' =None, parent_pipeline_id: 'str' =None, schedules: 'list[V1PipelineSchedule]' =None, shared_filesystem: 'V1SharedFilesystem' =None, state: 'str' =None, statuses: 'list[V1PipelineStepStatus]' =None, steps: 'list[V1PipelineStep]' =None, updated_at: 'datetime' =None, user_id: 'str' =None):  # noqa: E501
        """PipelinesIdBody - a model defined in Swagger"""  # noqa: E501
        self._cluster_id = None
        self._created_at = None
@@ -80,7 +82,8 @@ class PipelinesIdBody(object):
        self._error = None
        self._message = None
        self._name = None
-        self.
+        self._parent_pipeline_id = None
+        self._schedules = None
        self._shared_filesystem = None
        self._state = None
        self._statuses = None
@@ -100,8 +103,10 @@ class PipelinesIdBody(object):
            self.message = message
        if name is not None:
            self.name = name
-        if
-        self.
+        if parent_pipeline_id is not None:
+            self.parent_pipeline_id = parent_pipeline_id
+        if schedules is not None:
+            self.schedules = schedules
        if shared_filesystem is not None:
            self.shared_filesystem = shared_filesystem
        if state is not None:
@@ -242,25 +247,46 @@ class PipelinesIdBody(object):
        self._name = name

    @property
-    def
-        """Gets the
+    def parent_pipeline_id(self) -> 'str':
+        """Gets the parent_pipeline_id of this PipelinesIdBody. # noqa: E501


-        :return: The
+        :return: The parent_pipeline_id of this PipelinesIdBody. # noqa: E501
        :rtype: str
        """
-        return self.
+        return self._parent_pipeline_id

-    @
-    def
-        """Sets the
+    @parent_pipeline_id.setter
+    def parent_pipeline_id(self, parent_pipeline_id: 'str'):
+        """Sets the parent_pipeline_id of this PipelinesIdBody.


-        :param
+        :param parent_pipeline_id: The parent_pipeline_id of this PipelinesIdBody. # noqa: E501
        :type: str
        """

-        self.
+        self._parent_pipeline_id = parent_pipeline_id
+
+    @property
+    def schedules(self) -> 'list[V1PipelineSchedule]':
+        """Gets the schedules of this PipelinesIdBody. # noqa: E501
+
+
+        :return: The schedules of this PipelinesIdBody. # noqa: E501
+        :rtype: list[V1PipelineSchedule]
+        """
+        return self._schedules
+
+    @schedules.setter
+    def schedules(self, schedules: 'list[V1PipelineSchedule]'):
+        """Sets the schedules of this PipelinesIdBody.
+
+
+        :param schedules: The schedules of this PipelinesIdBody. # noqa: E501
+        :type: list[V1PipelineSchedule]
+        """
+
+        self._schedules = schedules

    @property
    def shared_filesystem(self) -> 'V1SharedFilesystem':
lightning_sdk/lightning_cloud/openapi/models/project_id_pipelines_body.py
CHANGED
@@ -43,6 +43,7 @@ class ProjectIdPipelinesBody(object):
    swagger_types = {
        'cluster_id': 'str',
        'name': 'str',
+        'schedules': 'list[V1PipelineSchedule]',
        'shared_filesystem': 'V1SharedFilesystem',
        'steps': 'list[V1PipelineStep]'
    }
@@ -50,14 +51,16 @@ class ProjectIdPipelinesBody(object):
    attribute_map = {
        'cluster_id': 'clusterId',
        'name': 'name',
+        'schedules': 'schedules',
        'shared_filesystem': 'sharedFilesystem',
        'steps': 'steps'
    }

-    def __init__(self, cluster_id: 'str' =None, name: 'str' =None, shared_filesystem: 'V1SharedFilesystem' =None, steps: 'list[V1PipelineStep]' =None):  # noqa: E501
+    def __init__(self, cluster_id: 'str' =None, name: 'str' =None, schedules: 'list[V1PipelineSchedule]' =None, shared_filesystem: 'V1SharedFilesystem' =None, steps: 'list[V1PipelineStep]' =None):  # noqa: E501
        """ProjectIdPipelinesBody - a model defined in Swagger"""  # noqa: E501
        self._cluster_id = None
        self._name = None
+        self._schedules = None
        self._shared_filesystem = None
        self._steps = None
        self.discriminator = None
@@ -65,6 +68,8 @@ class ProjectIdPipelinesBody(object):
            self.cluster_id = cluster_id
        if name is not None:
            self.name = name
+        if schedules is not None:
+            self.schedules = schedules
        if shared_filesystem is not None:
            self.shared_filesystem = shared_filesystem
        if steps is not None:
@@ -112,6 +117,27 @@ class ProjectIdPipelinesBody(object):

        self._name = name

+    @property
+    def schedules(self) -> 'list[V1PipelineSchedule]':
+        """Gets the schedules of this ProjectIdPipelinesBody. # noqa: E501
+
+
+        :return: The schedules of this ProjectIdPipelinesBody. # noqa: E501
+        :rtype: list[V1PipelineSchedule]
+        """
+        return self._schedules
+
+    @schedules.setter
+    def schedules(self, schedules: 'list[V1PipelineSchedule]'):
+        """Sets the schedules of this ProjectIdPipelinesBody.
+
+
+        :param schedules: The schedules of this ProjectIdPipelinesBody. # noqa: E501
+        :type: list[V1PipelineSchedule]
+        """
+
+        self._schedules = schedules
+
    @property
    def shared_filesystem(self) -> 'V1SharedFilesystem':
        """Gets the shared_filesystem of this ProjectIdPipelinesBody. # noqa: E501
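Both request bodies now accept the scheduling fields directly in their constructors. A construction sketch; the fields of V1PipelineSchedule live in the new v1_pipeline_schedule.py, which this diff does not show, so it is instantiated empty here and the string values are placeholders:

from lightning_sdk.lightning_cloud.openapi import V1PipelineSchedule
from lightning_sdk.lightning_cloud.openapi.models.pipelines_id_body import PipelinesIdBody
from lightning_sdk.lightning_cloud.openapi.models.project_id_pipelines_body import ProjectIdPipelinesBody

# Populate with the fields defined in v1_pipeline_schedule.py (not shown in this diff).
schedule = V1PipelineSchedule()

# Create request: schedules can be attached when the pipeline is first created.
create_body = ProjectIdPipelinesBody(name="nightly-train", schedules=[schedule])

# Update request: the parent pipeline link and schedules can be set on an existing pipeline.
update_body = PipelinesIdBody(parent_pipeline_id="pipeline-abc123", schedules=[schedule])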
lightning_sdk/lightning_cloud/openapi/models/v1_filesystem_job.py
CHANGED
@@ -41,6 +41,7 @@ class V1FilesystemJob(object):
                            and the value is json key in definition.
    """
    swagger_types = {
+        'artifacts_type': 'V1JobArtifactsType',
        'cloud_space_id': 'str',
        'id': 'str',
        'name': 'str',
@@ -48,19 +49,23 @@ class V1FilesystemJob(object):
    }

    attribute_map = {
+        'artifacts_type': 'artifactsType',
        'cloud_space_id': 'cloudSpaceId',
        'id': 'id',
        'name': 'name',
        'run_id': 'runId'
    }

-    def __init__(self, cloud_space_id: 'str' =None, id: 'str' =None, name: 'str' =None, run_id: 'str' =None):  # noqa: E501
+    def __init__(self, artifacts_type: 'V1JobArtifactsType' =None, cloud_space_id: 'str' =None, id: 'str' =None, name: 'str' =None, run_id: 'str' =None):  # noqa: E501
        """V1FilesystemJob - a model defined in Swagger"""  # noqa: E501
+        self._artifacts_type = None
        self._cloud_space_id = None
        self._id = None
        self._name = None
        self._run_id = None
        self.discriminator = None
+        if artifacts_type is not None:
+            self.artifacts_type = artifacts_type
        if cloud_space_id is not None:
            self.cloud_space_id = cloud_space_id
        if id is not None:
@@ -70,6 +75,27 @@ class V1FilesystemJob(object):
        if run_id is not None:
            self.run_id = run_id

+    @property
+    def artifacts_type(self) -> 'V1JobArtifactsType':
+        """Gets the artifacts_type of this V1FilesystemJob. # noqa: E501
+
+
+        :return: The artifacts_type of this V1FilesystemJob. # noqa: E501
+        :rtype: V1JobArtifactsType
+        """
+        return self._artifacts_type
+
+    @artifacts_type.setter
+    def artifacts_type(self, artifacts_type: 'V1JobArtifactsType'):
+        """Sets the artifacts_type of this V1FilesystemJob.
+
+
+        :param artifacts_type: The artifacts_type of this V1FilesystemJob. # noqa: E501
+        :type: V1JobArtifactsType
+        """
+
+        self._artifacts_type = artifacts_type
+
    @property
    def cloud_space_id(self) -> 'str':
        """Gets the cloud_space_id of this V1FilesystemJob. # noqa: E501