proximl 0.5.4__py3-none-any.whl → 0.5.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- proximl/__init__.py +1 -1
- proximl/cli/__init__.py +3 -6
- proximl/cli/volume.py +235 -0
- proximl/exceptions.py +21 -12
- proximl/jobs.py +36 -39
- proximl/proximl.py +7 -15
- proximl/volumes.py +255 -0
- {proximl-0.5.4.dist-info → proximl-0.5.5.dist-info}/METADATA +1 -1
- {proximl-0.5.4.dist-info → proximl-0.5.5.dist-info}/RECORD +22 -17
- tests/integration/test_checkpoints_integration.py +7 -5
- tests/integration/test_datasets_integration.py +4 -5
- tests/integration/test_jobs_integration.py +40 -2
- tests/integration/test_models_integration.py +8 -10
- tests/integration/test_projects_integration.py +2 -6
- tests/integration/test_volumes_integration.py +100 -0
- tests/unit/cli/test_cli_volume_unit.py +20 -0
- tests/unit/conftest.py +82 -9
- tests/unit/test_volumes_unit.py +447 -0
- {proximl-0.5.4.dist-info → proximl-0.5.5.dist-info}/LICENSE +0 -0
- {proximl-0.5.4.dist-info → proximl-0.5.5.dist-info}/WHEEL +0 -0
- {proximl-0.5.4.dist-info → proximl-0.5.5.dist-info}/entry_points.txt +0 -0
- {proximl-0.5.4.dist-info → proximl-0.5.5.dist-info}/top_level.txt +0 -0
proximl/volumes.py
ADDED
|
@@ -0,0 +1,255 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import logging
|
|
3
|
+
import math
|
|
4
|
+
import asyncio
|
|
5
|
+
from datetime import datetime
|
|
6
|
+
|
|
7
|
+
from .exceptions import (
|
|
8
|
+
VolumeError,
|
|
9
|
+
ApiError,
|
|
10
|
+
SpecificationError,
|
|
11
|
+
ProxiMLException,
|
|
12
|
+
)
|
|
13
|
+
from .connections import Connection
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class Volumes(object):
    """Collection-level API for ProxiML volumes (get/list/create/remove)."""

    def __init__(self, proximl):
        # Client handle used for all API queries.
        self.proximl = proximl

    async def get(self, id, **kwargs):
        """Fetch a single volume by id and wrap it in a Volume instance."""
        resp = await self.proximl._query(f"/volume/{id}", "GET", kwargs)
        return Volume(self.proximl, **resp)

    async def list(self, **kwargs):
        """Return all volumes visible to the caller as Volume instances."""
        # Fix: original used an f-string with no placeholder here.
        resp = await self.proximl._query("/volume", "GET", kwargs)
        return [Volume(self.proximl, **volume) for volume in resp]

    async def create(self, name, source_type, source_uri, capacity, **kwargs):
        """Create a new volume.

        Args:
            name: human-readable volume name.
            source_type: data source type (e.g. "git", "wasabi", "local").
            source_uri: location of the source data.
            capacity: provisioned size string (e.g. "10G").
            **kwargs: optional ``source_options`` and ``project_uuid``;
                the project defaults to the client's active project.

        Returns:
            The newly created Volume.
        """
        data = dict(
            name=name,
            source_type=source_type,
            source_uri=source_uri,
            capacity=capacity,
            source_options=kwargs.get("source_options"),
            project_uuid=kwargs.get("project_uuid") or self.proximl.active_project,
        )
        # Drop unset optional fields so the API only receives provided values.
        payload = {k: v for k, v in data.items() if v is not None}
        logging.info(f"Creating Volume {name}")
        resp = await self.proximl._query("/volume", "POST", None, payload)
        volume = Volume(self.proximl, **resp)
        logging.info(f"Created Volume {name} with id {volume.id}")

        return volume

    async def remove(self, id, **kwargs):
        """Delete a volume by id, forcing removal even if still in use."""
        await self.proximl._query(f"/volume/{id}", "DELETE", dict(**kwargs, force=True))
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class Volume:
    """A single ProxiML volume.

    Wraps the raw API response payload (``self._volume``) and exposes
    read-only properties plus lifecycle operations: connect/disconnect,
    export, rename, remove, and status polling via ``wait_for``.
    """

    def __init__(self, proximl, **kwargs):
        self.proximl = proximl
        self._volume = kwargs
        # Fix: original passed get("id") as the default for get("id"),
        # a redundant no-op fallback.
        self._id = self._volume.get("id")
        self._status = self._volume.get("status")
        self._name = self._volume.get("name")
        self._capacity = self._volume.get("capacity")
        self._used_size = self._volume.get("used_size")
        self._billed_size = self._volume.get("billed_size")
        self._project_uuid = self._volume.get("project_uuid")

    @property
    def id(self) -> str:
        """Unique identifier of the volume."""
        return self._id

    @property
    def status(self) -> str:
        """Current lifecycle status (e.g. "new", "downloading", "ready")."""
        return self._status

    @property
    def name(self) -> str:
        """Human-readable volume name."""
        return self._name

    @property
    def capacity(self) -> str:
        """Provisioned capacity as a size string (e.g. "10G")."""
        return self._capacity

    @property
    def used_size(self) -> int:
        """Bytes currently used on the volume."""
        return self._used_size

    @property
    def billed_size(self) -> int:
        """Bytes billed for the volume."""
        return self._billed_size

    def __str__(self):
        return json.dumps(self._volume)

    def __repr__(self):
        return f"Volume( proximl , **{self._volume.__repr__()})"

    def __bool__(self):
        # A volume without an id is an empty placeholder and is falsy.
        return bool(self._id)

    async def get_log_url(self):
        """Return a URL from which the volume's logs can be downloaded."""
        resp = await self.proximl._query(
            f"/volume/{self._id}/logs",
            "GET",
            dict(project_uuid=self._project_uuid),
        )
        return resp

    async def get_details(self):
        """Return detailed metadata for the volume from the API."""
        resp = await self.proximl._query(
            f"/volume/{self._id}/details",
            "GET",
            dict(project_uuid=self._project_uuid),
        )
        return resp

    async def get_connection_utility_url(self):
        """Return a download URL for the connection utility bundle."""
        resp = await self.proximl._query(
            f"/volume/{self._id}/download",
            "GET",
            dict(project_uuid=self._project_uuid),
        )
        return resp

    def get_connection_details(self):
        """Build the local connection descriptor from the VPN payload.

        Returns an empty dict when the volume has no VPN configuration.
        """
        vpn = self._volume.get("vpn")
        if not vpn:
            return dict()
        return dict(
            entity_type="volume",
            project_uuid=self._volume.get("project_uuid"),
            cidr=vpn.get("cidr"),
            ssh_port=vpn.get("client").get("ssh_port"),
            # The source only transfers while the volume is being
            # populated; the output only applies during an export.
            input_path=(
                self._volume.get("source_uri")
                if self.status in ["new", "downloading"]
                else None
            ),
            output_path=(
                self._volume.get("output_uri")
                if self.status == "exporting"
                else None
            ),
        )

    async def connect(self):
        """Start the local connection utility for an active transfer.

        Raises:
            SpecificationError: if the volume is already ready or failed,
                since only downloading or exporting volumes accept
                connections.
        """
        if self.status in ["ready", "failed"]:
            raise SpecificationError(
                "status",
                "You can only connect to downloading or exporting volumes.",
            )
        if self.status == "new":
            # Wait until the server begins the download before connecting.
            await self.wait_for("downloading")
        connection = Connection(
            self.proximl, entity_type="volume", id=self.id, entity=self
        )
        await connection.start()
        return connection.status

    async def disconnect(self):
        """Stop the local connection utility for this volume."""
        connection = Connection(
            self.proximl, entity_type="volume", id=self.id, entity=self
        )
        await connection.stop()
        return connection.status

    async def remove(self, force=False):
        """Delete the volume; ``force=True`` removes it even if in use."""
        await self.proximl._query(
            f"/volume/{self._id}",
            "DELETE",
            dict(project_uuid=self._project_uuid, force=force),
        )

    async def rename(self, name):
        """Rename the volume and refresh this instance in place."""
        resp = await self.proximl._query(
            f"/volume/{self._id}",
            "PATCH",
            dict(project_uuid=self._project_uuid),
            dict(name=name),
        )
        self.__init__(self.proximl, **resp)
        return self

    async def export(self, output_type, output_uri, output_options=None):
        """Start an export of the volume contents to external storage.

        Args:
            output_type: destination type (e.g. "aws", "wasabi").
            output_uri: destination URI.
            output_options: optional provider-specific options. Fix: the
                original used a mutable ``dict()`` default argument;
                ``None`` now stands in for an empty options dict.
        """
        resp = await self.proximl._query(
            f"/volume/{self._id}/export",
            "POST",
            dict(project_uuid=self._project_uuid),
            dict(
                output_type=output_type,
                output_uri=output_uri,
                output_options=output_options if output_options is not None else dict(),
            ),
        )
        self.__init__(self.proximl, **resp)
        return self

    def _get_msg_handler(self, msg_handler):
        # Wrap the user-supplied handler: subscription messages are
        # delegated to it; with no handler they are printed with a
        # human-readable timestamp.
        def handler(data):
            if data.get("type") == "subscription":
                if msg_handler:
                    msg_handler(data)
                else:
                    timestamp = datetime.fromtimestamp(int(data.get("time")) / 1000)
                    print(
                        f"{timestamp.strftime('%m/%d/%Y, %H:%M:%S')}: {data.get('msg').rstrip()}"
                    )

        return handler

    async def attach(self, msg_handler=None):
        """Subscribe to live status/log messages while the volume is busy."""
        await self.refresh()
        if self.status not in ["ready", "failed"]:
            await self.proximl._ws_subscribe(
                "volume",
                self._project_uuid,
                self.id,
                self._get_msg_handler(msg_handler),
            )

    async def refresh(self):
        """Re-fetch the volume from the API and update this instance."""
        resp = await self.proximl._query(
            f"/volume/{self.id}",
            "GET",
            dict(project_uuid=self._project_uuid),
        )
        self.__init__(self.proximl, **resp)
        return self

    async def wait_for(self, status, timeout=300):
        """Poll until the volume reaches ``status`` or ``timeout`` elapses.

        Args:
            status: one of "downloading", "ready", or "archived".
            timeout: maximum seconds to wait (default 300).

        Raises:
            SpecificationError: for an unsupported target status.
            VolumeError: if the volume enters the "failed" state.
            ProxiMLException: on timeout.
        """
        valid_statuses = ["downloading", "ready", "archived"]
        if status not in valid_statuses:
            raise SpecificationError(
                "status",
                f"Invalid wait_for status {status}. Valid statuses are: {valid_statuses}",
            )
        if self.status == status:
            return
        POLL_INTERVAL_MIN = 5
        POLL_INTERVAL_MAX = 60
        # Scale the poll interval with the timeout, clamped to [5, 60]s.
        POLL_INTERVAL = max(min(timeout / 60, POLL_INTERVAL_MAX), POLL_INTERVAL_MIN)
        retry_count = math.ceil(timeout / POLL_INTERVAL)
        count = 0
        while count < retry_count:
            await asyncio.sleep(POLL_INTERVAL)
            try:
                await self.refresh()
            except ApiError as e:
                # A 404 after deletion means the archive completed.
                if status == "archived" and e.status == 404:
                    return
                # Bare raise preserves the original traceback.
                raise
            if self.status == status:
                return self
            elif self.status == "failed":
                raise VolumeError(self.status, self)
            else:
                count += 1
                logging.debug(f"self: {self}, retry count {count}")

        raise ProxiMLException(f"Timeout waiting for {status}")
|
|
@@ -2,20 +2,21 @@ examples/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
|
2
2
|
examples/create_dataset_and_training_job.py,sha256=Fqueoz2KD1MTxnU960iqHUdxvo68Xe64HT5XS55lI9w,1178
|
|
3
3
|
examples/local_storage.py,sha256=6K6LMO7ZPI7N2KdBcgqXSvdsqJfjISzN4yRO9YrJqbA,1745
|
|
4
4
|
examples/training_inference_pipeline.py,sha256=pxux0QUUtRXxKj2rX-6fPEKBIi43mhRd1zJ-Lf1ZJGI,2334
|
|
5
|
-
proximl/__init__.py,sha256=
|
|
5
|
+
proximl/__init__.py,sha256=3gq9ZsAAIsysDGCFRY4rF-eaMA6i7uPpZDDQ39rjBKM,432
|
|
6
6
|
proximl/__main__.py,sha256=JgErYkiskih8Y6oRwowALtR-rwQhAAdqOYWjQraRIPI,59
|
|
7
7
|
proximl/auth.py,sha256=LacGBDAVel5HcJx7JXp1wVn3s0gZc7nf--vDfSFOXYU,26565
|
|
8
8
|
proximl/checkpoints.py,sha256=JPSq9iTSb6dBu0JFKJZgMqDBt9kSGxfl2OaxKehk4QY,8274
|
|
9
9
|
proximl/connections.py,sha256=0C8VJSkDturQVlzg_3yAkQz8W9NAmgzDCBRK_SrzeUI,20035
|
|
10
10
|
proximl/datasets.py,sha256=RB7IiDkWZPuwFw1o0xKQn10HS_3ui_BZf7tVlAvCIHA,7935
|
|
11
11
|
proximl/environments.py,sha256=L_cRmau1wJxpGvnJAqgms-GiNdDhiuOntrlBqsdoE3A,1507
|
|
12
|
-
proximl/exceptions.py,sha256=
|
|
12
|
+
proximl/exceptions.py,sha256=3q8qj-sahwEEHeYyjY89GrytVAgHDApN8HSCB3dhnf4,4094
|
|
13
13
|
proximl/gpu_types.py,sha256=V-EZzE-hDLi5eVQ2_9yGLTm8-Qk1AnnzctfSVC44yLY,1901
|
|
14
|
-
proximl/jobs.py,sha256=
|
|
14
|
+
proximl/jobs.py,sha256=kQbC9ojrMEvRAysFfftwLdVtNDjpiFBfji4bz1To82E,17838
|
|
15
15
|
proximl/models.py,sha256=QLtkIoznww5cZE_Eqoy7IaEgCEfNbJOe_21hdoExO-k,7750
|
|
16
16
|
proximl/projects.py,sha256=87Fsce8BkPJLN_cEsZ3FLequpBgmd8WtjY7Vs7uZkO4,5209
|
|
17
|
-
proximl/proximl.py,sha256=
|
|
18
|
-
proximl/
|
|
17
|
+
proximl/proximl.py,sha256=yzKbJ7ak8YiVv6q7CEagPufECNU4YTNeul6N3OuyH1M,10864
|
|
18
|
+
proximl/volumes.py,sha256=VnlgTeQwoDKGpZRhMrSKtxe4EWzb4qQ-OdDyaJohH5Y,8094
|
|
19
|
+
proximl/cli/__init__.py,sha256=R_8ExAKQp67N2xtwGM00gtK3zSoWPGruHAP_XFLddSI,4346
|
|
19
20
|
proximl/cli/checkpoint.py,sha256=Iv1i1EAt2LJey2wy2ioQ6-ZysqwRG4kFj0lnE6INZCM,7170
|
|
20
21
|
proximl/cli/connection.py,sha256=YiWqRIB9ZfTl30DjDFaJEpXuDrA-Ldl9PEzFFdZ_hFI,1700
|
|
21
22
|
proximl/cli/dataset.py,sha256=ueoeicBY8aMLpvpKUIBICnS9GsEnDOj7ZlFmOfjjY4c,6871
|
|
@@ -23,6 +24,7 @@ proximl/cli/environment.py,sha256=nh7oYbG5oOrZEpZkMkKgvzFXmQJWnFTMw1-YbuvkdFU,10
|
|
|
23
24
|
proximl/cli/gpu.py,sha256=xL8eqM5ca_Ueaj8cWit1iKn34KhaR0StrubVeRU2YQY,883
|
|
24
25
|
proximl/cli/model.py,sha256=xdjveIaRPK7MdfrnFygPEuwYRJRW9VqheZ-11XnXDcE,6111
|
|
25
26
|
proximl/cli/project.py,sha256=Er1twSiWQSAKir-hBIT9fRo2fc_UGqFoIJOwwjQGmlo,3522
|
|
27
|
+
proximl/cli/volume.py,sha256=uyIrKov4zwCjyLyZrEJYoEbIkS0zdU3xSyWZk2BM1kA,6246
|
|
26
28
|
proximl/cli/cloudbender/__init__.py,sha256=vxj62MyM3sC9h8M4ii3szH4s9JvEhicOQ0D0m7eNwPA,534
|
|
27
29
|
proximl/cli/cloudbender/datastore.py,sha256=_vQOj-NfrL_nj4HfxNJL63TJZjLgfDyztRLyaRU58v8,3478
|
|
28
30
|
proximl/cli/cloudbender/device.py,sha256=FdQZPESP6YBfUSzXq1Byu7eNMKi59qSOICONK-TEljI,3453
|
|
@@ -43,17 +45,18 @@ proximl/cloudbender/regions.py,sha256=Nu1LT6nuLD8Nt-5-7_FLlxDNZoDDAY6QduTdEBqfxJ
|
|
|
43
45
|
proximl/cloudbender/reservations.py,sha256=14ImJRLWQGG7CXDYhDnOI2W8pnP6CVVG2aVpysQVN0E,3586
|
|
44
46
|
tests/integration/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
45
47
|
tests/integration/conftest.py,sha256=zRWpherX-yfbpk7xqZk9bIZCyJ-dwVeszY_7kekn2M4,1134
|
|
46
|
-
tests/integration/test_checkpoints_integration.py,sha256=
|
|
47
|
-
tests/integration/test_datasets_integration.py,sha256=
|
|
48
|
+
tests/integration/test_checkpoints_integration.py,sha256=DUA6ZlX0gsnKCEvFmWCKbKkfJG3dQG6uD8T3fgo7y0M,3230
|
|
49
|
+
tests/integration/test_datasets_integration.py,sha256=Ndp8itnncDSjVH0t5BM5R_-_yL4qt6JrkQAVOTMi1R8,3499
|
|
48
50
|
tests/integration/test_environments_integration.py,sha256=7P6pKSyxA7rTwyNCD9HEaM2ablMG8WcBesOzkG-BgsQ,1403
|
|
49
51
|
tests/integration/test_gpu_types_integration.py,sha256=Zv-yrHcAgKau9BJQvzi92bdrRHhLPl_hbhhzNLEWJ9w,1256
|
|
50
|
-
tests/integration/test_jobs_integration.py,sha256=
|
|
51
|
-
tests/integration/test_models_integration.py,sha256=
|
|
52
|
-
tests/integration/test_projects_integration.py,sha256=
|
|
52
|
+
tests/integration/test_jobs_integration.py,sha256=fMqYfdzHOxUxtRGe0XZlsFhjUGoeo0Prxbq5_yQvroc,24730
|
|
53
|
+
tests/integration/test_models_integration.py,sha256=JFgX6V652iMobpNKfn0XdP9_TDg5pnVtG8lL6yrEOZk,2902
|
|
54
|
+
tests/integration/test_projects_integration.py,sha256=XdGGqvOQby77kMfoD_zAukKvN6tN7v1QydZ4hFp7u_s,1446
|
|
55
|
+
tests/integration/test_volumes_integration.py,sha256=Xo2Whw2U-7jyvESIkyex3f0SMXKlExe3kLmsbpXTHhQ,3270
|
|
53
56
|
tests/integration/cloudbender/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
54
57
|
tests/integration/cloudbender/test_providers_integration.py,sha256=gFqPQom-Cn1iZC50_ChQ2us2_f4tIPATQSAUcWdf7ss,1473
|
|
55
58
|
tests/unit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
56
|
-
tests/unit/conftest.py,sha256=
|
|
59
|
+
tests/unit/conftest.py,sha256=dilDp7ZSmyg9tJbkJoGcqUF07JBTdkNoULOR6qK7NsU,31197
|
|
57
60
|
tests/unit/test_auth.py,sha256=IJZHT5CZLNfu3MybTdEWIsKlvxNfFYpZ6oWYzbS856g,858
|
|
58
61
|
tests/unit/test_checkpoints_unit.py,sha256=WfFE6HvQHglBKfcJ3B0IYXdXpaNmcL2BDurRQmIbFAU,16008
|
|
59
62
|
tests/unit/test_connections_unit.py,sha256=LFAZzlrvL9oM8rZJTiC1oA9quw1KA2vMUCc3LV6SjXs,5507
|
|
@@ -65,6 +68,7 @@ tests/unit/test_jobs_unit.py,sha256=kCrm2FdogqYgji0Coe9KGaODYEsXmvuV46FN2mUE1Ts,
|
|
|
65
68
|
tests/unit/test_models_unit.py,sha256=wgwdvJLt6QZ_IKCvp6Kmt2Z4b_CvIN8KCuFqlBaxDe8,15064
|
|
66
69
|
tests/unit/test_projects_unit.py,sha256=_SwPFhy2Xl6HRt6w5_HYrqtZJHAzFzoCZ3kqAS1Zn3c,9530
|
|
67
70
|
tests/unit/test_proximl.py,sha256=E-er5V1O-4ZVfTO2R-2wGsKN80q0_BVG5ho9Ab9lnE4,1701
|
|
71
|
+
tests/unit/test_volumes_unit.py,sha256=YcyrGPCq7XfAtCdKdp44723o7mLGIu5_hKOdhAFVYV4,15305
|
|
68
72
|
tests/unit/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
69
73
|
tests/unit/cli/conftest.py,sha256=w6p_2URicywJKUCtY79tSD_mx8cwJtxHbK_Lu3grOYs,236
|
|
70
74
|
tests/unit/cli/test_cli_checkpoint_unit.py,sha256=Z4v80C2gqzRXdHons3_82lNC_VqL_YOt53HrUQnzReI,702
|
|
@@ -74,6 +78,7 @@ tests/unit/cli/test_cli_gpu_unit.py,sha256=xdwIZEZJcJlWSuLBEcLhZXXH9EogZoKRiJlMR
|
|
|
74
78
|
tests/unit/cli/test_cli_job_unit.py,sha256=UioSx_ZRY0qh4AJf90rIepputVUPBZ0KSqNR_u592UY,611
|
|
75
79
|
tests/unit/cli/test_cli_model_unit.py,sha256=AucngxvYjW6GidDGBPHnKyYOb82ff7xMX5mVVUcbrCA,629
|
|
76
80
|
tests/unit/cli/test_cli_project_unit.py,sha256=ms9gJ8pgMNGeIMdFcvBcwSPmb0i2qo9-rk9CCF53-9M,1756
|
|
81
|
+
tests/unit/cli/test_cli_volume_unit.py,sha256=VeYb9PHuD45E23JCybF-C7ZaHsuERw7me2OUmYnmAAg,644
|
|
77
82
|
tests/unit/cli/cloudbender/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
78
83
|
tests/unit/cli/cloudbender/test_cli_datastore_unit.py,sha256=sUZWnzzCCG7NdgzEHsQ2zHpEb-ZD6FDIazca6FqMOc0,1381
|
|
79
84
|
tests/unit/cli/cloudbender/test_cli_device_unit.py,sha256=o1vrPlbaanYK1iJG5pE6tDwgmOXDuLUY0VH8DxtaPYI,1342
|
|
@@ -89,9 +94,9 @@ tests/unit/cloudbender/test_nodes_unit.py,sha256=ehOHkNroiLKNTR09SbnBPpwELE72GcG
|
|
|
89
94
|
tests/unit/cloudbender/test_providers_unit.py,sha256=y63VCqHXb4Yu8sh0kW30-ojRvv9aUa5j1jNkmb46KTc,4373
|
|
90
95
|
tests/unit/cloudbender/test_regions_unit.py,sha256=9bvP268gpNyygjh1IEpSSiUt2aP6okv7QOsV1XoaIS0,6299
|
|
91
96
|
tests/unit/cloudbender/test_reservations_unit.py,sha256=ICuFT5sexnLvS7taoC18yQYuDZHpBRrNuCj3Uq_Arwo,5624
|
|
92
|
-
proximl-0.5.
|
|
93
|
-
proximl-0.5.
|
|
94
|
-
proximl-0.5.
|
|
95
|
-
proximl-0.5.
|
|
96
|
-
proximl-0.5.
|
|
97
|
-
proximl-0.5.
|
|
97
|
+
proximl-0.5.5.dist-info/LICENSE,sha256=ADFxLEZDxKY0j4MdyUd5GNuhQ18rnWH5rOz1ZG7yiOA,1069
|
|
98
|
+
proximl-0.5.5.dist-info/METADATA,sha256=ddskyt_dpYJW72fTThnwbbICvztVKb0eFgM2-crCFeg,7344
|
|
99
|
+
proximl-0.5.5.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
|
|
100
|
+
proximl-0.5.5.dist-info/entry_points.txt,sha256=HmI311IIabkZReMCXu-nGbvIEW-KfaduAOyfiSqt5SY,63
|
|
101
|
+
proximl-0.5.5.dist-info/top_level.txt,sha256=-TWqc9tAaxmWmW4c7uYsmzPEYUIoh6z02xxqPbv7Kys,23
|
|
102
|
+
proximl-0.5.5.dist-info/RECORD,,
|
|
@@ -54,18 +54,20 @@ class GetCheckpointTests:
|
|
|
54
54
|
|
|
55
55
|
@mark.create
|
|
56
56
|
@mark.asyncio
|
|
57
|
-
async def
|
|
57
|
+
async def test_checkpoint_wasabi(proximl, capsys):
|
|
58
58
|
checkpoint = await proximl.checkpoints.create(
|
|
59
|
-
name="CLI Automated
|
|
60
|
-
source_type="
|
|
61
|
-
source_uri="s3://proximl-
|
|
59
|
+
name="CLI Automated Wasabi",
|
|
60
|
+
source_type="wasabi",
|
|
61
|
+
source_uri="s3://proximl-example/models/proximl-examples",
|
|
62
|
+
capacity="10G",
|
|
63
|
+
source_options=dict(endpoint_url="https://s3.wasabisys.com"),
|
|
62
64
|
)
|
|
63
65
|
checkpoint = await checkpoint.wait_for("ready", 300)
|
|
64
66
|
status = checkpoint.status
|
|
65
67
|
size = checkpoint.size
|
|
66
68
|
await checkpoint.remove()
|
|
67
69
|
assert status == "ready"
|
|
68
|
-
assert size >=
|
|
70
|
+
assert size >= 500000
|
|
69
71
|
|
|
70
72
|
|
|
71
73
|
@mark.create
|
|
@@ -13,8 +13,9 @@ class GetDatasetTests:
|
|
|
13
13
|
async def dataset(self, proximl):
|
|
14
14
|
dataset = await proximl.datasets.create(
|
|
15
15
|
name="CLI Automated",
|
|
16
|
-
source_type="
|
|
17
|
-
source_uri="s3://proximl-
|
|
16
|
+
source_type="wasabi",
|
|
17
|
+
source_uri="s3://proximl-example/input/cifar-10/cifar-10-batches-bin",
|
|
18
|
+
source_options=dict(endpoint_url="https://s3.wasabisys.com"),
|
|
18
19
|
)
|
|
19
20
|
dataset = await dataset.wait_for("ready", 300)
|
|
20
21
|
yield dataset
|
|
@@ -48,9 +49,7 @@ class GetDatasetTests:
|
|
|
48
49
|
async def test_dataset_repr(self, dataset):
|
|
49
50
|
string = repr(dataset)
|
|
50
51
|
regex = (
|
|
51
|
-
r"^Dataset\( proximl , \*\*{.*'dataset_uuid': '"
|
|
52
|
-
+ dataset.id
|
|
53
|
-
+ r"'.*}\)$"
|
|
52
|
+
r"^Dataset\( proximl , \*\*{.*'dataset_uuid': '" + dataset.id + r"'.*}\)$"
|
|
54
53
|
)
|
|
55
54
|
assert isinstance(string, str)
|
|
56
55
|
assert re.match(regex, string)
|
|
@@ -269,7 +269,7 @@ class JobAPIDataValidationTests:
|
|
|
269
269
|
),
|
|
270
270
|
)
|
|
271
271
|
assert (
|
|
272
|
-
"Invalid Request -
|
|
272
|
+
"Invalid Request - output_type invalid for Notebook and Endpoint jobs"
|
|
273
273
|
in error.value.message
|
|
274
274
|
)
|
|
275
275
|
|
|
@@ -298,7 +298,45 @@ class JobAPIDataValidationTests:
|
|
|
298
298
|
),
|
|
299
299
|
)
|
|
300
300
|
assert (
|
|
301
|
-
"Invalid Request -
|
|
301
|
+
"Invalid Request - output_type invalid for Notebook and Endpoint jobs"
|
|
302
|
+
in error.value.message
|
|
303
|
+
)
|
|
304
|
+
|
|
305
|
+
    async def test_invalid_volumes_for_training(self, proximl):
        # Training jobs must not accept writable volumes; the API is
        # expected to reject the request with a validation error.
        with raises(ApiError) as error:
            await proximl.jobs.create(
                name="Invalid Volumes for Training",
                type="training",
                gpu_types=["rtx3090"],
                disk_size=10,
                data=dict(
                    output_uri="s3://proximl-examples/output/resnet_cifar10",
                    output_type="aws",
                    volumes=["volume-id"],
                ),
                workers=["python train.py"],
            )
        assert (
            "Invalid Request - Only Notebook and Endpoint job types can use writable volumes"
            in error.value.message
        )
|
|
323
|
+
|
|
324
|
+
    async def test_invalid_volumes_for_inference(self, proximl):
        # Inference jobs must not accept writable volumes; mirrors the
        # training-job validation test above the same error message.
        with raises(ApiError) as error:
            await proximl.jobs.create(
                name="Invalid Volumes for Inference",
                type="inference",
                gpu_types=["rtx3090"],
                disk_size=10,
                data=dict(
                    output_uri="s3://proximl-examples/output/resnet_cifar10",
                    output_type="aws",
                    volumes=["volume-id"],
                ),
                workers=["python predict.py"],
            )
        assert (
            "Invalid Request - Only Notebook and Endpoint job types can use writable volumes"
            in error.value.message
        )
|
|
304
342
|
|
|
@@ -43,29 +43,27 @@ class GetModelTests:
|
|
|
43
43
|
|
|
44
44
|
async def test_model_repr(self, model):
|
|
45
45
|
string = repr(model)
|
|
46
|
-
regex = (
|
|
47
|
-
r"^Model\( proximl , \*\*{.*'model_uuid': '"
|
|
48
|
-
+ model.id
|
|
49
|
-
+ r"'.*}\)$"
|
|
50
|
-
)
|
|
46
|
+
regex = r"^Model\( proximl , \*\*{.*'model_uuid': '" + model.id + r"'.*}\)$"
|
|
51
47
|
assert isinstance(string, str)
|
|
52
48
|
assert re.match(regex, string)
|
|
53
49
|
|
|
54
50
|
|
|
55
51
|
@mark.create
|
|
56
52
|
@mark.asyncio
|
|
57
|
-
async def
|
|
53
|
+
async def test_model_wasabi(proximl, capsys):
|
|
58
54
|
model = await proximl.models.create(
|
|
59
|
-
name="CLI Automated
|
|
60
|
-
source_type="
|
|
61
|
-
source_uri="s3://proximl-
|
|
55
|
+
name="CLI Automated Wasabi",
|
|
56
|
+
source_type="wasabi",
|
|
57
|
+
source_uri="s3://proximl-example/models/proximl-examples",
|
|
58
|
+
capacity="10G",
|
|
59
|
+
source_options=dict(endpoint_url="https://s3.wasabisys.com"),
|
|
62
60
|
)
|
|
63
61
|
model = await model.wait_for("ready", 300)
|
|
64
62
|
status = model.status
|
|
65
63
|
size = model.size
|
|
66
64
|
await model.remove()
|
|
67
65
|
assert status == "ready"
|
|
68
|
-
assert size >=
|
|
66
|
+
assert size >= 500000
|
|
69
67
|
|
|
70
68
|
|
|
71
69
|
@mark.create
|
|
@@ -11,9 +11,7 @@ pytestmark = [mark.sdk, mark.integration, mark.projects]
|
|
|
11
11
|
class GetProjectsTests:
|
|
12
12
|
@fixture(scope="class")
|
|
13
13
|
async def project(self, proximl):
|
|
14
|
-
project = await proximl.projects.create(
|
|
15
|
-
name="New Project", copy_keys=False
|
|
16
|
-
)
|
|
14
|
+
project = await proximl.projects.create(name="New Project", copy_keys=False)
|
|
17
15
|
yield project
|
|
18
16
|
await project.remove()
|
|
19
17
|
|
|
@@ -41,8 +39,6 @@ class GetProjectsTests:
|
|
|
41
39
|
|
|
42
40
|
async def test_project_repr(self, project):
|
|
43
41
|
string = repr(project)
|
|
44
|
-
regex = (
|
|
45
|
-
r"^Project\( proximl , \*\*{.*'id': '" + project.id + r"'.*}\)$"
|
|
46
|
-
)
|
|
42
|
+
regex = r"^Project\( proximl , \*\*{.*'id': '" + project.id + r"'.*}\)$"
|
|
47
43
|
assert isinstance(string, str)
|
|
48
44
|
assert re.match(regex, string)
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import re
|
|
2
|
+
import sys
|
|
3
|
+
import asyncio
|
|
4
|
+
from pytest import mark, fixture
|
|
5
|
+
|
|
6
|
+
pytestmark = [mark.sdk, mark.integration, mark.volumes]
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
@mark.create
@mark.asyncio
class GetVolumeTests:
    # Integration tests for volume CRUD against the live ProxiML API.
    @fixture(scope="class")
    async def volume(self, proximl):
        # Class-scoped fixture: creates a git-sourced volume, waits for
        # it to become ready, and archives it after the tests complete.
        volume = await proximl.volumes.create(
            name="CLI Automated",
            source_type="git",
            source_uri="git@github.com:proxiML/environment-tests.git",
            capacity="10G",
        )
        volume = await volume.wait_for("ready", 120)
        yield volume
        await volume.remove()
        volume = await volume.wait_for("archived", 60)

    async def test_get_volumes(self, proximl, volume):
        # At minimum the fixture volume should appear in the listing.
        volumes = await proximl.volumes.list()
        assert len(volumes) > 0

    async def test_get_volume(self, proximl, volume):
        # Fetching by id round-trips to the same volume.
        response = await proximl.volumes.get(volume.id)
        assert response.id == volume.id

    async def test_volume_properties(self, volume):
        # Spot-check the types of the public properties.
        assert isinstance(volume.id, str)
        assert isinstance(volume.status, str)
        assert isinstance(volume.name, str)
        assert isinstance(volume.capacity, str)
        assert isinstance(volume.used_size, int)
        assert isinstance(volume.billed_size, int)

    async def test_volume_str(self, volume):
        # str() renders the raw payload as a JSON object containing the id.
        string = str(volume)
        regex = r"^{.*\"id\": \"" + volume.id + r"\".*}$"
        assert isinstance(string, str)
        assert re.match(regex, string)

    async def test_volume_repr(self, volume):
        # repr() uses the constructor-call form with the raw payload.
        string = repr(volume)
        regex = r"^Volume\( proximl , \*\*{.*'id': '" + volume.id + r"'.*}\)$"
        assert isinstance(string, str)
        assert re.match(regex, string)
|
|
53
|
+
|
|
54
|
+
@mark.create
@mark.asyncio
async def test_volume_wasabi(proximl, capsys):
    # End-to-end: create a volume from a Wasabi (S3-compatible) source,
    # wait for the download, then verify the reported sizes.
    volume = await proximl.volumes.create(
        name="CLI Automated Wasabi",
        source_type="wasabi",
        source_uri="s3://proximl-example/models/proximl-examples",
        capacity="10G",
        source_options=dict(endpoint_url="https://s3.wasabisys.com"),
    )
    volume = await volume.wait_for("ready", 300)
    status = volume.status
    billed_size = volume.billed_size
    used_size = volume.used_size
    await volume.remove()
    assert status == "ready"
    # Size thresholds are lower bounds on the sample dataset's footprint.
    assert billed_size >= 10000000
    assert used_size >= 500000
|
72
|
+
|
|
73
|
+
|
|
74
|
+
@mark.create
@mark.asyncio
async def test_volume_local(proximl, capsys):
    # End-to-end: upload a local directory into a new volume. attach()
    # streams progress messages while connect() performs the transfer,
    # so the two must run concurrently.
    volume = await proximl.volumes.create(
        name="CLI Automated Local",
        source_type="local",
        source_uri="~/tensorflow-model",
        capacity="10G",
    )
    attach_task = asyncio.create_task(volume.attach())
    connect_task = asyncio.create_task(volume.connect())
    await asyncio.gather(attach_task, connect_task)
    await volume.disconnect()
    await volume.refresh()
    status = volume.status
    billed_size = volume.billed_size
    used_size = volume.used_size
    await volume.remove()
    assert status == "ready"
    assert billed_size >= 10000000
    assert used_size >= 1000000
    # Re-emit captured output so it is visible in the test report, then
    # verify the upload progress messages were printed.
    captured = capsys.readouterr()
    sys.stdout.write(captured.out)
    sys.stderr.write(captured.err)
    assert "Starting data upload from local" in captured.out
    assert "official/LICENSE 11456 bytes" in captured.out
    assert "Upload complete" in captured.out
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
import re
|
|
2
|
+
import json
|
|
3
|
+
import click
|
|
4
|
+
from unittest.mock import AsyncMock, patch
|
|
5
|
+
from pytest import mark, fixture, raises
|
|
6
|
+
|
|
7
|
+
pytestmark = [mark.cli, mark.unit, mark.volumes]
|
|
8
|
+
|
|
9
|
+
from proximl.cli import volume as specimen
|
|
10
|
+
from proximl.volumes import Volume
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def test_list(runner, mock_my_volumes):
    # Replace the CLI's ProxiML client with an async mock whose volume
    # listing yields the fixture data, then drive `volume list` through
    # the click test runner and confirm it succeeds.
    with patch("proximl.cli.ProxiML", new=AsyncMock) as proximl_mock:
        proximl_mock.volumes = AsyncMock()
        proximl_mock.volumes.list = AsyncMock(return_value=mock_my_volumes)
        outcome = runner.invoke(specimen, ["list"])
        print(outcome)
        assert outcome.exit_code == 0
        proximl_mock.volumes.list.assert_called_once()