lightning-sdk 0.1.38__py3-none-any.whl → 0.1.39__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lightning_sdk/__init__.py +1 -1
- lightning_sdk/api/deployment_api.py +0 -2
- lightning_sdk/api/job_api.py +4 -0
- lightning_sdk/api/teamspace_api.py +4 -2
- lightning_sdk/api/utils.py +6 -3
- lightning_sdk/cli/download.py +3 -5
- lightning_sdk/cli/run.py +16 -0
- lightning_sdk/cli/upload.py +3 -10
- lightning_sdk/job/base.py +22 -0
- lightning_sdk/job/job.py +10 -1
- lightning_sdk/job/v1.py +5 -0
- lightning_sdk/job/v2.py +12 -0
- lightning_sdk/lightning_cloud/openapi/api/data_connection_service_api.py +6 -1
- lightning_sdk/lightning_cloud/openapi/api/models_store_api.py +118 -1
- lightning_sdk/lightning_cloud/openapi/models/id_start_body.py +29 -3
- lightning_sdk/lightning_cloud/openapi/models/project_id_cloudspaces_body.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_data_path.py +29 -3
- lightning_sdk/lightning_cloud/openapi/models/v1_job_spec.py +53 -53
- lightning_sdk/lightning_cloud/openapi/models/v1_multi_machine_job.py +27 -1
- lightning_sdk/lightning_cloud/openapi/models/v1_multi_machine_job_state.py +0 -2
- lightning_sdk/models.py +132 -0
- lightning_sdk/teamspace.py +3 -2
- {lightning_sdk-0.1.38.dist-info → lightning_sdk-0.1.39.dist-info}/METADATA +1 -1
- {lightning_sdk-0.1.38.dist-info → lightning_sdk-0.1.39.dist-info}/RECORD +28 -28
- lightning_sdk/cli/models.py +0 -68
- {lightning_sdk-0.1.38.dist-info → lightning_sdk-0.1.39.dist-info}/LICENSE +0 -0
- {lightning_sdk-0.1.38.dist-info → lightning_sdk-0.1.39.dist-info}/WHEEL +0 -0
- {lightning_sdk-0.1.38.dist-info → lightning_sdk-0.1.39.dist-info}/entry_points.txt +0 -0
- {lightning_sdk-0.1.38.dist-info → lightning_sdk-0.1.39.dist-info}/top_level.txt +0 -0
lightning_sdk/__init__.py
CHANGED
lightning_sdk/api/job_api.py
CHANGED
@@ -150,6 +150,8 @@ class JobApiV2:
         env: Optional[Dict[str, str]],
         image_credentials: Optional[str],
         cluster_auth: bool,
+        artifacts_local: Optional[str],
+        artifacts_remote: Optional[str],
     ) -> V1Job:
         env_vars = []
         if env is not None:
@@ -171,6 +173,8 @@ class JobApiV2:
             spot=interruptible,
             image_cluster_credentials=cluster_auth,
             image_secret_ref=image_credentials or "",
+            artifacts_source=artifacts_local or "",
+            artifacts_destination=artifacts_remote or "",
         )
         body = ProjectIdJobsBody(name=name, spec=spec)
lightning_sdk/api/teamspace_api.py
CHANGED
@@ -260,12 +260,14 @@ class TeamspaceApi:
         name: str,
         version: str,
         download_dir: Path,
-
+        teamspace_name: str,
+        teamspace_owner_name: str,
         progress_bar: bool = True,
     ) -> List[str]:
         return _download_model_files(
             client=self._client,
-
+            teamspace_name=teamspace_name,
+            teamspace_owner_name=teamspace_owner_name,
             name=name,
             version=version,
             download_dir=download_dir,
lightning_sdk/api/utils.py
CHANGED
@@ -513,7 +513,8 @@ def _get_model_version(client: LightningClient, teamspace_id: str, name: str, ve
 
 def _download_model_files(
     client: LightningClient,
-
+    teamspace_name: str,
+    teamspace_owner_name: str,
     name: str,
     version: str,
     download_dir: Path,
@@ -521,7 +522,9 @@ def _download_model_files(
     num_workers: int = 20,
 ) -> List[str]:
     api = ModelsStoreApi(client.api_client)
-    response = api.models_store_get_model_files(
+    response = api.models_store_get_model_files(
+        project_name=teamspace_name, project_owner_name=teamspace_owner_name, name=name, version=version
+    )
 
     pbar = None
     if progress_bar:
@@ -541,7 +544,7 @@ def _download_model_files(
             client=client,
             model_id=response.model_id,
             version=response.version,
-            teamspace_id=
+            teamspace_id=response.project_id,
             remote_path=filepath,
             file_path=str(local_file),
             num_workers=num_workers,
lightning_sdk/cli/download.py
CHANGED
@@ -4,8 +4,8 @@ from pathlib import Path
 from typing import Optional
 
 from lightning_sdk.cli.exceptions import StudioCliError
-from lightning_sdk.cli.models import _get_teamspace, _parse_model_name
 from lightning_sdk.cli.studios_menu import _StudiosMenu
+from lightning_sdk.models import download_model
 from lightning_sdk.studio import Studio
 from lightning_sdk.utils.resolve import _get_authed_user, skip_studio_init
 
@@ -21,10 +21,8 @@ class _Downloads(_StudiosMenu):
                 This should have the format <ORGANIZATION-NAME>/<TEAMSPACE-NAME>/<MODEL-NAME>.
             download_dir: The directory where the Model should be downloaded.
         """
-
-
-        teamspace.download_model(
-            name=model_name,
+        download_model(
+            name=name,
             download_dir=download_dir,
             progress_bar=True,
         )
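The CLI download path now delegates to the new top-level helper in `lightning_sdk.models` (added in `lightning_sdk/models.py`). A minimal sketch of calling it directly, using only the keyword arguments visible in this diff; the model name and target directory below are hypothetical:

```python
# Sketch only: mirrors the call made in lightning_sdk/cli/download.py.
from lightning_sdk.models import download_model

download_model(
    name="my-org/my-teamspace/my-model",  # <ORGANIZATION-NAME>/<TEAMSPACE-NAME>/<MODEL-NAME>
    download_dir="./my-model",
    progress_bar=True,
)
```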
lightning_sdk/cli/run.py
CHANGED
@@ -40,7 +40,19 @@ class _Run:
                 This should be the name of the respective credentials secret created on the Lightning AI platform.
             cluster_auth: Whether to authenticate with the cluster to pull the image.
                 Required if the registry is part of a cluster provider (e.g. ECR).
+            artifacts_local: The path of inside the docker container, you want to persist images from.
+                CAUTION: When setting this to "/", it will effectively erase your container.
+                Only supported for jobs with a docker image compute environment.
+            artifacts_remote: The remote storage to persist your artifacts to.
+                Should be of format <CONNECTION_TYPE>:<CONNECTION_NAME>:<PATH_WITHIN_CONNECTION>.
+                PATH_WITHIN_CONNECTION hereby is a path relative to the connection's root.
+                E.g. efs:data:some-path would result in an EFS connection named `data` and to the path `some-path`
+                within it.
+                Note that the connection needs to be added to the teamspace already in order for it to be found.
+                Only supported for jobs with a docker image compute environment.
         """
+        # TODO: the docstrings from artifacts_local and artifacts_remote don't show up completely,
+        # might need to switch to explicit cli definition
         self.job.__func__.__doc__ = docstr
 
         # TODO: sadly, fire displays both Optional[type] and Union[type, None] as Optional[Optional]
@@ -61,6 +73,8 @@ class _Run:
         interruptible: bool = False,
         image_credentials: Optional[str] = None,
         cluster_auth: bool = False,
+        artifacts_local: Optional[str] = None,
+        artifacts_remote: Optional[str] = None,
     ) -> None:
         machine_enum = Machine(machine.upper())
         Job.run(
@@ -77,4 +91,6 @@ class _Run:
             interruptible=interruptible,
             image_credentials=image_credentials,
             cluster_auth=cluster_auth,
+            artifacts_local=artifacts_local,
+            artifacts_remote=artifacts_remote,
         )
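For reference, a minimal sketch of the equivalent Python call the CLI forwards to via `Job.run`, using the parameter names from this diff. The image, command, and EFS connection are hypothetical, and `Job`/`Machine` are assumed to be importable from the package root as in earlier releases:

```python
# Sketch only: argument names follow the Job.run signature shown in this diff;
# the image, command and connection are hypothetical examples.
# Both artifact options must be given together, and artifacts_remote must have
# exactly three colon-separated parts.
from lightning_sdk import Job, Machine

job = Job.run(
    name="docker-job-with-artifacts",
    machine=Machine.CPU,
    command="python train.py",
    image="ghcr.io/example/train:latest",   # docker-image compute environment
    artifacts_local="/outputs",             # path inside the container to persist
    artifacts_remote="efs:data:some-path",  # <CONNECTION_TYPE>:<CONNECTION_NAME>:<PATH_WITHIN_CONNECTION>
)
```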
lightning_sdk/cli/upload.py
CHANGED
@@ -9,8 +9,8 @@ from tqdm import tqdm
 
 from lightning_sdk.api.utils import _get_cloud_url
 from lightning_sdk.cli.exceptions import StudioCliError
-from lightning_sdk.cli.models import _get_teamspace, _parse_model_name
 from lightning_sdk.cli.studios_menu import _StudiosMenu
+from lightning_sdk.models import upload_model
 from lightning_sdk.studio import Studio
 from lightning_sdk.utils.resolve import _get_authed_user, skip_studio_init
 
@@ -20,7 +20,7 @@ class _Uploads(_StudiosMenu):
 
     _studio_upload_status_path = "~/.lightning/studios/uploads"
 
-    def model(self, name: str, path:
+    def model(self, name: str, path: str = ".", cloud_account: Optional[str] = None) -> None:
         """Upload a Model.
 
         Args:
@@ -29,14 +29,7 @@ class _Uploads(_StudiosMenu):
             path: The path to the file or directory you want to upload. Defaults to the current directory.
             cloud_account: The name of the cloud account to store the Model in.
         """
-
-        teamspace = _get_teamspace(name=teamspace_name, organization=org_name)
-        teamspace.upload_model(
-            path=path or ".",
-            name=model_name,
-            progress_bar=True,
-            cluster_id=cloud_account,
-        )
+        upload_model(name, path, cloud_account=cloud_account)
 
     def _resolve_studio(self, studio: Optional[str]) -> Studio:
         user = _get_authed_user()
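Upload goes through the matching `upload_model` helper, called with the positional and keyword arguments shown above; the values below are hypothetical:

```python
# Sketch only: mirrors upload_model(name, path, cloud_account=cloud_account)
# from lightning_sdk/cli/upload.py.
from lightning_sdk.models import upload_model

upload_model("my-org/my-teamspace/my-model", "./checkpoints", cloud_account=None)
```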
lightning_sdk/job/base.py
CHANGED
@@ -52,6 +52,8 @@ class _BaseJob(ABC):
         interruptible: bool = False,
         image_credentials: Optional[str] = None,
         cluster_auth: bool = False,
+        artifacts_local: Optional[str] = None,
+        artifacts_remote: Optional[str] = None,
     ) -> "_BaseJob":
         from lightning_sdk.studio import Studio
 
@@ -89,12 +91,28 @@ class _BaseJob(ABC):
             if cluster_auth:
                 raise ValueError("cluster_auth is only supported when using a custom image")
 
+            if artifacts_local is not None or artifacts_remote is not None:
+                raise ValueError(
+                    "Specifying artifacts persistence is supported for docker images only. "
+                    "Other jobs will automatically persist artifacts to the teamspace distributed filesystem."
+                )
+
         else:
             if studio is not None:
                 raise RuntimeError(
                     "image and studio are mutually exclusive as both define the environment to run the job in"
                 )
 
+        # they either need to specified both or none of them
+        if bool(artifacts_local) != bool(artifacts_remote):
+            raise ValueError("Artifact persistence requires both artifacts_local and artifacts_remote to be set")
+
+        if artifacts_remote and len(artifacts_remote.split(":")) != 3:
+            raise ValueError(
+                "Artifact persistence requires exactly three arguments separated by colon of kind "
+                f"<CONNECTION_TYPE>:<CONNECTION_NAME>:<PATH_WITHIN_CONNECTION>, got {artifacts_local}"
+            )
+
         inst = cls(name=name, teamspace=teamspace, org=org, user=user, _fetch_job=False)
         inst._submit(
             machine=machine,
@@ -106,6 +124,8 @@ class _BaseJob(ABC):
             interruptible=interruptible,
             image_credentials=image_credentials,
             cluster_auth=cluster_auth,
+            artifacts_local=artifacts_local,
+            artifacts_remote=artifacts_remote,
         )
         return inst
 
@@ -121,6 +141,8 @@ class _BaseJob(ABC):
         cluster: Optional[str] = None,
         image_credentials: Optional[str] = None,
         cluster_auth: bool = False,
+        artifacts_local: Optional[str] = None,
+        artifacts_remote: Optional[str] = None,
     ) -> None:
         """Submits a job and updates the internal _job attribute as well as the _name attribute."""
lightning_sdk/job/job.py
CHANGED
@@ -60,6 +60,8 @@ class Job(_BaseJob):
         interruptible: bool = False,
         image_credentials: Optional[str] = None,
         cluster_auth: bool = False,
+        artifacts_local: Optional[str] = None,
+        artifacts_remote: Optional[str] = None,
     ) -> "Job":
         ret_val = super().run(
             name=name,
@@ -75,6 +77,8 @@ class Job(_BaseJob):
             interruptible=interruptible,
             image_credentials=image_credentials,
             cluster_auth=cluster_auth,
+            artifacts_local=artifacts_local,
+            artifacts_remote=artifacts_remote,
         )
         # required for typing with "Job"
         assert isinstance(ret_val, cls)
@@ -91,8 +95,10 @@ class Job(_BaseJob):
         cluster: Optional[str] = None,
         image_credentials: Optional[str] = None,
         cluster_auth: bool = False,
+        artifacts_local: Optional[str] = None,
+        artifacts_remote: Optional[str] = None,
     ) -> None:
-
+        self._job = self._internal_job._submit(
             machine=machine,
             cluster=cluster,
             command=command,
@@ -102,7 +108,10 @@ class Job(_BaseJob):
             interruptible=interruptible,
             image_credentials=image_credentials,
             cluster_auth=cluster_auth,
+            artifacts_local=artifacts_local,
+            artifacts_remote=artifacts_remote,
         )
+        return self
 
     def stop(self) -> None:
         return self._internal_job.stop()
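Together with the checks added in `_BaseJob.run` above, a job submitted through this path must set both artifact options or neither, and the remote spec must have exactly three colon-separated parts. A small standalone sketch of those rules (reproduced here for illustration, not the SDK code itself):

```python
# Standalone illustration of the validation added in _BaseJob.run; values are hypothetical.
def check_artifact_args(artifacts_local, artifacts_remote):
    # both-or-none: exactly one side being set is rejected
    if bool(artifacts_local) != bool(artifacts_remote):
        raise ValueError("artifacts_local and artifacts_remote must be set together")
    # the remote spec must have three colon-separated parts
    if artifacts_remote and len(artifacts_remote.split(":")) != 3:
        raise ValueError("expected <CONNECTION_TYPE>:<CONNECTION_NAME>:<PATH_WITHIN_CONNECTION>")

check_artifact_args("/outputs", "efs:data:some-path")  # passes
# check_artifact_args("/outputs", None)        -> ValueError (only one side set)
# check_artifact_args("/outputs", "efs:data")  -> ValueError (two parts instead of three)
```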
lightning_sdk/job/v1.py
CHANGED
@@ -69,6 +69,8 @@ class _JobV1(_BaseJob):
         cluster: Optional[str] = None,
         image_credentials: Optional[str] = None,
         cluster_auth: bool = False,
+        artifacts_local: Optional[str] = None,
+        artifacts_remote: Optional[str] = None,
     ) -> None:
         if studio is None:
             raise ValueError("Studio is required for submitting jobs")
@@ -76,6 +78,9 @@ class _JobV1(_BaseJob):
         if image is not None or image_credentials is not None or cluster_auth:
             raise ValueError("Image is not supported for submitting jobs")
 
+        if artifacts_local is not None or artifacts_remote is not None:
+            raise ValueError("Specifying how to persist artifacts is not yet supported with jobs")
+
         if env is not None:
             raise ValueError("Environment variables are not supported for submitting jobs")
lightning_sdk/job/v2.py
CHANGED
@@ -36,6 +36,8 @@ class _JobV2(_BaseJob):
         cluster: Optional[str] = None,
         image_credentials: Optional[str] = None,
         cluster_auth: bool = False,
+        artifacts_local: Optional[str] = None,
+        artifacts_remote: Optional[str] = None,
     ) -> None:
         # Command is required if Studio is provided to know what to run
         # Image is mutually exclusive with Studio
@@ -66,6 +68,8 @@ class _JobV2(_BaseJob):
             env=env,
             image_credentials=image_credentials,
             cluster_auth=cluster_auth,
+            artifacts_local=artifacts_local,
+            artifacts_remote=artifacts_remote,
         )
         self._job = submitted
         self._name = submitted.name
@@ -112,10 +116,18 @@ class _JobV2(_BaseJob):
 
     @property
     def artifact_path(self) -> Optional[str]:
+        if self._guaranteed_job.spec.image != "":
+            if self._guaranteed_job.spec.artifacts_destination != "":
+                splits = self._guaranteed_job.spec.artifacts_destination.split(":")
+                return f"/teamspace/{splits[0]}_connections/{splits[1]}/{splits[2]}"
+            return None
+
         return f"/teamspace/jobs/{self._guaranteed_job.name}/artifacts"
 
     @property
     def snapshot_path(self) -> Optional[str]:
+        if self._guaranteed_job.spec.image != "":
+            return None
         return f"/teamspace/jobs/{self._guaranteed_job.name}/snapshot"
 
     @property
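For docker-image jobs the new `artifact_path` derives the mount location from `artifacts_destination` instead of the per-job folder. A small standalone sketch of that mapping (not the SDK property itself), assuming the destination follows the documented three-part format:

```python
# "<type>:<name>:<path>" -> "/teamspace/<type>_connections/<name>/<path>",
# mirroring the split performed in _JobV2.artifact_path.
def artifact_mount_path(artifacts_destination: str) -> str:
    conn_type, conn_name, path = artifacts_destination.split(":")
    return f"/teamspace/{conn_type}_connections/{conn_name}/{path}"

print(artifact_mount_path("efs:data:some-path"))
# /teamspace/efs_connections/data/some-path
```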
lightning_sdk/lightning_cloud/openapi/api/data_connection_service_api.py
CHANGED
@@ -716,6 +716,7 @@ class DataConnectionServiceApi(object):
         :param async_req bool
         :param str project_id: (required)
         :param str cluster_id:
+        :param list[str] state:
         :return: V1ListDataConnectionsResponse
                  If the method is called asynchronously,
                  returns the request thread.
@@ -738,12 +739,13 @@ class DataConnectionServiceApi(object):
         :param async_req bool
         :param str project_id: (required)
         :param str cluster_id:
+        :param list[str] state:
         :return: V1ListDataConnectionsResponse
                  If the method is called asynchronously,
                  returns the request thread.
         """
 
-        all_params = ['project_id', 'cluster_id'] # noqa: E501
+        all_params = ['project_id', 'cluster_id', 'state'] # noqa: E501
         all_params.append('async_req')
         all_params.append('_return_http_data_only')
         all_params.append('_preload_content')
@@ -772,6 +774,9 @@ class DataConnectionServiceApi(object):
         query_params = []
         if 'cluster_id' in params:
             query_params.append(('clusterId', params['cluster_id'])) # noqa: E501
+        if 'state' in params:
+            query_params.append(('state', params['state'])) # noqa: E501
+            collection_formats['state'] = 'multi' # noqa: E501
 
         header_params = {}
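The new `state` filter is registered with the `multi` collection format, so a list value is sent as a repeated query parameter rather than a comma-joined string. A standalone sketch of what that serialization looks like (the state values here are hypothetical placeholders):

```python
# Repeated query parameters, as produced for collection format 'multi'.
from urllib.parse import urlencode

query = [("clusterId", "my-cluster"), ("state", "AVAILABLE"), ("state", "PENDING")]
print(urlencode(query))
# clusterId=my-cluster&state=AVAILABLE&state=PENDING
```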
lightning_sdk/lightning_cloud/openapi/api/models_store_api.py
CHANGED
@@ -1197,6 +1197,8 @@ class ModelsStoreApi(object):
         :param str name:
         :param str version:
         :param str project_id:
+        :param str project_name:
+        :param str project_owner_name:
         :return: V1GetModelFilesResponse
                  If the method is called asynchronously,
                  returns the request thread.
@@ -1220,12 +1222,14 @@ class ModelsStoreApi(object):
         :param str name:
         :param str version:
         :param str project_id:
+        :param str project_name:
+        :param str project_owner_name:
         :return: V1GetModelFilesResponse
                  If the method is called asynchronously,
                  returns the request thread.
         """
 
-        all_params = ['name', 'version', 'project_id'] # noqa: E501
+        all_params = ['name', 'version', 'project_id', 'project_name', 'project_owner_name'] # noqa: E501
         all_params.append('async_req')
         all_params.append('_return_http_data_only')
         all_params.append('_preload_content')
@@ -1252,6 +1256,10 @@ class ModelsStoreApi(object):
             query_params.append(('version', params['version'])) # noqa: E501
         if 'project_id' in params:
             query_params.append(('projectId', params['project_id'])) # noqa: E501
+        if 'project_name' in params:
+            query_params.append(('projectName', params['project_name'])) # noqa: E501
+        if 'project_owner_name' in params:
+            query_params.append(('projectOwnerName', params['project_owner_name'])) # noqa: E501
 
         header_params = {}
 
@@ -1282,6 +1290,115 @@ class ModelsStoreApi(object):
             _request_timeout=params.get('_request_timeout'),
             collection_formats=collection_formats)
 
+    def models_store_get_model_version(self, project_id: 'str', model_id: 'str', version: 'str', **kwargs) -> 'V1ModelVersionArchive': # noqa: E501
+        """GetModelVersion used to get specific model version details # noqa: E501
+
+        This method makes a synchronous HTTP request by default. To make an
+        asynchronous HTTP request, please pass async_req=True
+        >>> thread = api.models_store_get_model_version(project_id, model_id, version, async_req=True)
+        >>> result = thread.get()
+
+        :param async_req bool
+        :param str project_id: (required)
+        :param str model_id: (required)
+        :param str version: (required)
+        :return: V1ModelVersionArchive
+                 If the method is called asynchronously,
+                 returns the request thread.
+        """
+        kwargs['_return_http_data_only'] = True
+        if kwargs.get('async_req'):
+            return self.models_store_get_model_version_with_http_info(project_id, model_id, version, **kwargs) # noqa: E501
+        else:
+            (data) = self.models_store_get_model_version_with_http_info(project_id, model_id, version, **kwargs) # noqa: E501
+            return data
+
+    def models_store_get_model_version_with_http_info(self, project_id: 'str', model_id: 'str', version: 'str', **kwargs) -> 'V1ModelVersionArchive': # noqa: E501
+        """GetModelVersion used to get specific model version details # noqa: E501
+
+        This method makes a synchronous HTTP request by default. To make an
+        asynchronous HTTP request, please pass async_req=True
+        >>> thread = api.models_store_get_model_version_with_http_info(project_id, model_id, version, async_req=True)
+        >>> result = thread.get()
+
+        :param async_req bool
+        :param str project_id: (required)
+        :param str model_id: (required)
+        :param str version: (required)
+        :return: V1ModelVersionArchive
+                 If the method is called asynchronously,
+                 returns the request thread.
+        """
+
+        all_params = ['project_id', 'model_id', 'version'] # noqa: E501
+        all_params.append('async_req')
+        all_params.append('_return_http_data_only')
+        all_params.append('_preload_content')
+        all_params.append('_request_timeout')
+
+        params = locals()
+        for key, val in six.iteritems(params['kwargs']):
+            if key not in all_params:
+                raise TypeError(
+                    "Got an unexpected keyword argument '%s'"
+                    " to method models_store_get_model_version" % key
+                )
+            params[key] = val
+        del params['kwargs']
+        # verify the required parameter 'project_id' is set
+        if ('project_id' not in params or
+                params['project_id'] is None):
+            raise ValueError("Missing the required parameter `project_id` when calling `models_store_get_model_version`") # noqa: E501
+        # verify the required parameter 'model_id' is set
+        if ('model_id' not in params or
+                params['model_id'] is None):
+            raise ValueError("Missing the required parameter `model_id` when calling `models_store_get_model_version`") # noqa: E501
+        # verify the required parameter 'version' is set
+        if ('version' not in params or
+                params['version'] is None):
+            raise ValueError("Missing the required parameter `version` when calling `models_store_get_model_version`") # noqa: E501
+
+        collection_formats = {}
+
+        path_params = {}
+        if 'project_id' in params:
+            path_params['projectId'] = params['project_id'] # noqa: E501
+        if 'model_id' in params:
+            path_params['modelId'] = params['model_id'] # noqa: E501
+        if 'version' in params:
+            path_params['version'] = params['version'] # noqa: E501
+
+        query_params = []
+
+        header_params = {}
+
+        form_params = []
+        local_var_files = {}
+
+        body_params = None
+        # HTTP header `Accept`
+        header_params['Accept'] = self.api_client.select_header_accept(
+            ['application/json']) # noqa: E501
+
+        # Authentication setting
+        auth_settings = [] # noqa: E501
+
+        return self.api_client.call_api(
+            '/v1/projects/{projectId}/models/{modelId}/versions/{version}', 'GET',
+            path_params,
+            query_params,
+            header_params,
+            body=body_params,
+            post_params=form_params,
+            files=local_var_files,
+            response_type='V1ModelVersionArchive', # noqa: E501
+            auth_settings=auth_settings,
+            async_req=params.get('async_req'),
+            _return_http_data_only=params.get('_return_http_data_only'),
+            _preload_content=params.get('_preload_content', True),
+            _request_timeout=params.get('_request_timeout'),
+            collection_formats=collection_formats)
+
     def models_store_list_model_versions(self, project_id: 'str', model_id: 'str', **kwargs) -> 'V1ListModelVersionsResponse': # noqa: E501
         """models_store_list_model_versions # noqa: E501
 
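These additions let callers resolve model files by teamspace name and owner (as `_download_model_files` now does) and fetch a single model version from `/v1/projects/{projectId}/models/{modelId}/versions/{version}`. A hedged sketch of both calls; the method and keyword names are taken from this diff, while the client construction, import paths, and concrete names are assumptions:

```python
# Sketch only: client setup and the example names are assumptions.
from lightning_sdk.lightning_cloud.openapi import ModelsStoreApi
from lightning_sdk.lightning_cloud.rest_client import LightningClient

api = ModelsStoreApi(LightningClient().api_client)

# Name/owner-based lookup, as used by _download_model_files:
files = api.models_store_get_model_files(
    project_name="my-teamspace",
    project_owner_name="my-org",
    name="my-model",
    version="latest",
)

# New single-version endpoint, using the ids echoed back in the response:
version = api.models_store_get_model_version(
    project_id=files.project_id,
    model_id=files.model_id,
    version=files.version,
)
```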
lightning_sdk/lightning_cloud/openapi/models/id_start_body.py
CHANGED
@@ -41,19 +41,24 @@ class IdStartBody(object):
                             and the value is json key in definition.
     """
     swagger_types = {
-        'compute_config': 'V1UserRequestedComputeConfig'
+        'compute_config': 'V1UserRequestedComputeConfig',
+        'is_forked': 'bool'
     }
 
     attribute_map = {
-        'compute_config': 'computeConfig'
+        'compute_config': 'computeConfig',
+        'is_forked': 'isForked'
     }
 
-    def __init__(self, compute_config: 'V1UserRequestedComputeConfig' =None): # noqa: E501
+    def __init__(self, compute_config: 'V1UserRequestedComputeConfig' =None, is_forked: 'bool' =None): # noqa: E501
         """IdStartBody - a model defined in Swagger""" # noqa: E501
         self._compute_config = None
+        self._is_forked = None
         self.discriminator = None
         if compute_config is not None:
             self.compute_config = compute_config
+        if is_forked is not None:
+            self.is_forked = is_forked
 
     @property
     def compute_config(self) -> 'V1UserRequestedComputeConfig':
@@ -76,6 +81,27 @@ class IdStartBody(object):
 
         self._compute_config = compute_config
 
+    @property
+    def is_forked(self) -> 'bool':
+        """Gets the is_forked of this IdStartBody. # noqa: E501
+
+
+        :return: The is_forked of this IdStartBody. # noqa: E501
+        :rtype: bool
+        """
+        return self._is_forked
+
+    @is_forked.setter
+    def is_forked(self, is_forked: 'bool'):
+        """Sets the is_forked of this IdStartBody.
+
+
+        :param is_forked: The is_forked of this IdStartBody. # noqa: E501
+        :type: bool
+        """
+
+        self._is_forked = is_forked
+
     def to_dict(self) -> dict:
         """Returns the model properties as a dict"""
         result = {}
lightning_sdk/lightning_cloud/openapi/models/project_id_cloudspaces_body.py
CHANGED
@@ -50,6 +50,7 @@ class ProjectIdCloudspacesBody(object):
         'disk_size': 'str',
         'display_name': 'str',
         'name': 'str',
+        'plugins': 'list[str]',
         'requested_run_duration_seconds': 'str',
         'seed_files': 'list[V1CloudSpaceSeedFile]',
         'spot': 'bool'
@@ -65,12 +66,13 @@ class ProjectIdCloudspacesBody(object):
         'disk_size': 'diskSize',
         'display_name': 'displayName',
         'name': 'name',
+        'plugins': 'plugins',
         'requested_run_duration_seconds': 'requestedRunDurationSeconds',
         'seed_files': 'seedFiles',
         'spot': 'spot'
     }
 
-    def __init__(self, can_download_source_code: 'bool' =None, cloud_space_instance_cpu_image_override: 'str' =None, cloud_space_instance_gpu_image_override: 'str' =None, cluster_id: 'str' =None, compute_name: 'str' =None, data_connection_mounts: 'list[V1DataConnectionMount]' =None, disk_size: 'str' =None, display_name: 'str' =None, name: 'str' =None, requested_run_duration_seconds: 'str' =None, seed_files: 'list[V1CloudSpaceSeedFile]' =None, spot: 'bool' =None): # noqa: E501
+    def __init__(self, can_download_source_code: 'bool' =None, cloud_space_instance_cpu_image_override: 'str' =None, cloud_space_instance_gpu_image_override: 'str' =None, cluster_id: 'str' =None, compute_name: 'str' =None, data_connection_mounts: 'list[V1DataConnectionMount]' =None, disk_size: 'str' =None, display_name: 'str' =None, name: 'str' =None, plugins: 'list[str]' =None, requested_run_duration_seconds: 'str' =None, seed_files: 'list[V1CloudSpaceSeedFile]' =None, spot: 'bool' =None): # noqa: E501
         """ProjectIdCloudspacesBody - a model defined in Swagger""" # noqa: E501
         self._can_download_source_code = None
         self._cloud_space_instance_cpu_image_override = None
@@ -81,6 +83,7 @@ class ProjectIdCloudspacesBody(object):
         self._disk_size = None
         self._display_name = None
         self._name = None
+        self._plugins = None
         self._requested_run_duration_seconds = None
         self._seed_files = None
         self._spot = None
@@ -103,6 +106,8 @@ class ProjectIdCloudspacesBody(object):
             self.display_name = display_name
         if name is not None:
             self.name = name
+        if plugins is not None:
+            self.plugins = plugins
         if requested_run_duration_seconds is not None:
             self.requested_run_duration_seconds = requested_run_duration_seconds
         if seed_files is not None:
@@ -299,6 +304,27 @@ class ProjectIdCloudspacesBody(object):
 
         self._name = name
 
+    @property
+    def plugins(self) -> 'list[str]':
+        """Gets the plugins of this ProjectIdCloudspacesBody. # noqa: E501
+
+
+        :return: The plugins of this ProjectIdCloudspacesBody. # noqa: E501
+        :rtype: list[str]
+        """
+        return self._plugins
+
+    @plugins.setter
+    def plugins(self, plugins: 'list[str]'):
+        """Sets the plugins of this ProjectIdCloudspacesBody.
+
+
+        :param plugins: The plugins of this ProjectIdCloudspacesBody. # noqa: E501
+        :type: list[str]
+        """
+
+        self._plugins = plugins
+
     @property
     def requested_run_duration_seconds(self) -> 'str':
         """Gets the requested_run_duration_seconds of this ProjectIdCloudspacesBody. # noqa: E501