proximl-0.5.9-py3-none-any.whl → proximl-0.5.11-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. proximl/__init__.py +1 -1
  2. proximl/checkpoints.py +46 -28
  3. proximl/cli/cloudbender/__init__.py +2 -1
  4. proximl/cli/cloudbender/datastore.py +2 -7
  5. proximl/cli/cloudbender/service.py +19 -2
  6. proximl/cli/project/__init__.py +3 -72
  7. proximl/cli/project/data_connector.py +61 -0
  8. proximl/cli/project/datastore.py +61 -0
  9. proximl/cli/project/service.py +61 -0
  10. proximl/cloudbender/cloudbender.py +4 -2
  11. proximl/cloudbender/data_connectors.py +8 -0
  12. proximl/cloudbender/datastores.py +9 -19
  13. proximl/cloudbender/nodes.py +44 -1
  14. proximl/cloudbender/providers.py +53 -0
  15. proximl/cloudbender/regions.py +48 -0
  16. proximl/cloudbender/services.py +65 -1
  17. proximl/datasets.py +41 -12
  18. proximl/exceptions.py +51 -0
  19. proximl/jobs.py +15 -19
  20. proximl/models.py +41 -22
  21. proximl/volumes.py +24 -5
  22. {proximl-0.5.9.dist-info → proximl-0.5.11.dist-info}/METADATA +1 -1
  23. {proximl-0.5.9.dist-info → proximl-0.5.11.dist-info}/RECORD +48 -46
  24. tests/integration/projects/conftest.py +3 -1
  25. tests/integration/projects/test_projects_data_connectors_integration.py +44 -0
  26. tests/integration/projects/test_projects_datastores_integration.py +42 -0
  27. tests/integration/projects/test_projects_services_integration.py +44 -0
  28. tests/integration/test_checkpoints_integration.py +1 -2
  29. tests/integration/test_jobs_integration.py +13 -0
  30. tests/integration/test_models_integration.py +0 -1
  31. tests/unit/cli/projects/__init__.py +0 -0
  32. tests/unit/cli/projects/test_cli_project_data_connector_unit.py +28 -0
  33. tests/unit/cli/projects/test_cli_project_datastore_unit.py +26 -0
  34. tests/unit/cli/projects/test_cli_project_key_unit.py +26 -0
  35. tests/unit/cli/projects/test_cli_project_secret_unit.py +26 -0
  36. tests/unit/cli/projects/test_cli_project_service_unit.py +26 -0
  37. tests/unit/cli/projects/test_cli_project_unit.py +19 -0
  38. tests/unit/cloudbender/test_datastores_unit.py +1 -5
  39. tests/unit/cloudbender/test_services_unit.py +6 -0
  40. tests/unit/conftest.py +158 -15
  41. tests/unit/test_checkpoints_unit.py +15 -23
  42. tests/unit/test_datasets_unit.py +15 -20
  43. tests/unit/test_models_unit.py +13 -16
  44. tests/unit/test_volumes_unit.py +3 -0
  45. proximl/cli/cloudbender/reservation.py +0 -159
  46. proximl/cli/project.py +0 -154
  47. proximl/cloudbender/reservations.py +0 -126
  48. proximl/projects.py +0 -187
  49. tests/integration/test_projects_integration.py +0 -44
  50. tests/unit/cli/cloudbender/test_cli_reservation_unit.py +0 -38
  51. tests/unit/cli/test_cli_project_unit.py +0 -46
  52. tests/unit/cloudbender/test_reservations_unit.py +0 -173
  53. tests/unit/test_auth.py +0 -30
  54. tests/unit/test_projects_unit.py +0 -294
  55. tests/unit/test_proximl.py +0 -54
  56. {proximl-0.5.9.dist-info → proximl-0.5.11.dist-info}/LICENSE +0 -0
  57. {proximl-0.5.9.dist-info → proximl-0.5.11.dist-info}/WHEEL +0 -0
  58. {proximl-0.5.9.dist-info → proximl-0.5.11.dist-info}/entry_points.txt +0 -0
  59. {proximl-0.5.9.dist-info → proximl-0.5.11.dist-info}/top_level.txt +0 -0
proximl/__init__.py CHANGED
@@ -13,5 +13,5 @@ logging.basicConfig(
  logger = logging.getLogger(__name__)
 
 
- __version__ = "0.5.5"
+ __version__ = "0.5.11"
  __all__ = "ProxiML"
proximl/checkpoints.py CHANGED
@@ -23,9 +23,7 @@ class Checkpoints(object):
 
  async def list(self, **kwargs):
  resp = await self.proximl._query(f"/checkpoint", "GET", kwargs)
- checkpoints = [
- Checkpoint(self.proximl, **checkpoint) for checkpoint in resp
- ]
+ checkpoints = [Checkpoint(self.proximl, **checkpoint) for checkpoint in resp]
  return checkpoints
 
  async def list_public(self, **kwargs):
@@ -33,14 +31,24 @@ class Checkpoints(object):
  datasets = [Checkpoint(self.proximl, **dataset) for dataset in resp]
  return datasets
 
- async def create(self, name, source_type, source_uri, **kwargs):
+ async def create(
+ self,
+ name,
+ source_type,
+ source_uri,
+ type="evefs",
+ project_uuid=None,
+ **kwargs,
+ ):
+ if not project_uuid:
+ project_uuid = self.proximl.active_project
  data = dict(
  name=name,
  source_type=source_type,
  source_uri=source_uri,
- source_options=kwargs.get("source_options"),
- project_uuid=kwargs.get("project_uuid")
- or self.proximl.active_project,
+ project_uuid=project_uuid,
+ type=type,
+ **kwargs,
  )
  payload = {k: v for k, v in data.items() if v is not None}
  logging.info(f"Creating Checkpoint {name}")
@@ -60,12 +68,13 @@ class Checkpoint:
  def __init__(self, proximl, **kwargs):
  self.proximl = proximl
  self._checkpoint = kwargs
- self._id = self._checkpoint.get(
- "id", self._checkpoint.get("checkpoint_uuid")
- )
+ self._id = self._checkpoint.get("id", self._checkpoint.get("checkpoint_uuid"))
  self._status = self._checkpoint.get("status")
  self._name = self._checkpoint.get("name")
- self._size = self._checkpoint.get("size")
+ self._size = self._checkpoint.get("size") or self._checkpoint.get("used_size")
+ self._billed_size = self._checkpoint.get("billed_size") or self._checkpoint.get(
+ "size"
+ )
  self._project_uuid = self._checkpoint.get("project_uuid")
 
  @property
@@ -84,6 +93,10 @@ class Checkpoint:
  def size(self) -> int:
  return self._size
 
+ @property
+ def billed_size(self) -> int:
+ return self._billed_size
+
  def __str__(self):
  return json.dumps({k: v for k, v in self._checkpoint.items()})
 
@@ -123,15 +136,17 @@ class Checkpoint:
  entity_type="checkpoint",
  project_uuid=self._checkpoint.get("project_uuid"),
  cidr=self._checkpoint.get("vpn").get("cidr"),
- ssh_port=self._checkpoint.get("vpn")
- .get("client")
- .get("ssh_port"),
- input_path=self._checkpoint.get("source_uri")
- if self.status in ["new", "downloading"]
- else None,
- output_path=self._checkpoint.get("output_uri")
- if self.status == "exporting"
- else None,
+ ssh_port=self._checkpoint.get("vpn").get("client").get("ssh_port"),
+ input_path=(
+ self._checkpoint.get("source_uri")
+ if self.status in ["new", "downloading"]
+ else None
+ ),
+ output_path=(
+ self._checkpoint.get("output_uri")
+ if self.status == "exporting"
+ else None
+ ),
  )
  else:
  details = dict()
@@ -195,9 +210,7 @@ class Checkpoint:
  if msg_handler:
  msg_handler(data)
  else:
- timestamp = datetime.fromtimestamp(
- int(data.get("time")) / 1000
- )
+ timestamp = datetime.fromtimestamp(int(data.get("time")) / 1000)
  print(
  f"{timestamp.strftime('%m/%d/%Y, %H:%M:%S')}: {data.get('msg').rstrip()}"
  )
@@ -224,19 +237,24 @@ class Checkpoint:
  return self
 
  async def wait_for(self, status, timeout=300):
+ if self.status == status:
+ return
  valid_statuses = ["downloading", "ready", "archived"]
  if not status in valid_statuses:
  raise SpecificationError(
  "status",
  f"Invalid wait_for status {status}. Valid statuses are: {valid_statuses}",
  )
- if self.status == status:
- return
+
+ MAX_TIMEOUT = 24 * 60 * 60
+ if timeout > MAX_TIMEOUT:
+ raise SpecificationError(
+ "timeout",
+ f"timeout must be less than {MAX_TIMEOUT} seconds.",
+ )
  POLL_INTERVAL_MIN = 5
  POLL_INTERVAL_MAX = 60
- POLL_INTERVAL = max(
- min(timeout / 60, POLL_INTERVAL_MAX), POLL_INTERVAL_MIN
- )
+ POLL_INTERVAL = max(min(timeout / 60, POLL_INTERVAL_MAX), POLL_INTERVAL_MIN)
  retry_count = math.ceil(timeout / POLL_INTERVAL)
  count = 0
  while count < retry_count:
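The new guard caps `timeout` at MAX_TIMEOUT (24 * 60 * 60 = 86,400 seconds), and the polling cadence is derived from the timeout itself. A worked example with the default `timeout=300`, using only the expressions shown above:

    import math

    timeout = 300
    POLL_INTERVAL_MIN, POLL_INTERVAL_MAX = 5, 60
    POLL_INTERVAL = max(min(timeout / 60, POLL_INTERVAL_MAX), POLL_INTERVAL_MIN)
    # min(5.0, 60) = 5.0; max(5.0, 5) = 5.0 -> poll every 5 seconds
    retry_count = math.ceil(timeout / POLL_INTERVAL)
    # ceil(300 / 5.0) = 60 polling attempts before the wait gives up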
proximl/cli/cloudbender/__init__.py CHANGED
@@ -15,4 +15,5 @@ from proximl.cli.cloudbender.region import region
  from proximl.cli.cloudbender.node import node
  from proximl.cli.cloudbender.device import device
  from proximl.cli.cloudbender.datastore import datastore
- from proximl.cli.cloudbender.reservation import reservation
+ from proximl.cli.cloudbender.data_connector import data_connector
+ from proximl.cli.cloudbender.service import service
proximl/cli/cloudbender/datastore.py CHANGED
@@ -29,13 +29,11 @@ def datastore(config):
  def list(config, provider, region):
  """List datastores."""
  data = [
- ["ID", "NAME", "TYPE", "URI", "ROOT"],
+ ["ID", "NAME", "TYPE"],
  [
  "-" * 80,
  "-" * 80,
  "-" * 80,
- "-" * 80,
- "-" * 80,
  ],
  ]
 
@@ -51,15 +49,12 @@ def list(config, provider, region):
  datastore.id,
  datastore.name,
  datastore.type,
- datastore.uri,
- datastore.root,
  ]
  )
 
  for row in data:
  click.echo(
- "{: >37.36} {: >29.28} {: >9.8} {: >12.11} {: >12.11}"
- "".format(*row),
+ "{: >37.36} {: >29.28} {: >9.8} " "".format(*row),
  file=config.stdout,
  )
 
proximl/cli/cloudbender/service.py CHANGED
@@ -74,6 +74,19 @@ def list(config, provider, region):
  required=True,
  help="The region ID to create the service in.",
  )
+ @click.option(
+ "--type",
+ "-t",
+ type=click.Choice(
+ [
+ "https",
+ "tcp",
+ "udp",
+ ],
+ ),
+ required=True,
+ help="The type of regional service.",
+ )
  @click.option(
  "--public/--no-public",
  default=True,
@@ -82,13 +95,17 @@ def list(config, provider, region):
  )
  @click.argument("name", type=click.STRING, required=True)
  @pass_config
- def create(config, provider, region, public, name):
+ def create(config, provider, region, type, public, name):
  """
  Creates a service.
  """
  return config.proximl.run(
  config.proximl.client.cloudbender.services.create(
- provider_uuid=provider, region_uuid=region, name=name, public=public
+ provider_uuid=provider,
+ region_uuid=region,
+ name=name,
+ type=type,
+ public=public,
  )
  )
 
proximl/cli/project/__init__.py CHANGED
@@ -77,77 +77,8 @@ def remove(config, project):
  return config.proximl.run(found.remove())
 
 
- @project.command()
- @pass_config
- def list_datastores(config):
- """List project datastores."""
- data = [
- ["ID", "NAME", "TYPE", "REGION_UUID"],
- [
- "-" * 80,
- "-" * 80,
- "-" * 80,
- "-" * 80,
- ],
- ]
- project = config.proximl.run(
- config.proximl.client.projects.get(config.proximl.client.project)
- )
-
- datastores = config.proximl.run(project.list_datastores())
-
- for datastore in datastores:
- data.append(
- [
- datastore.id,
- datastore.name,
- datastore.type,
- datastore.region_uuid,
- ]
- )
-
- for row in data:
- click.echo(
- "{: >38.36} {: >30.28} {: >15.13} {: >38.36}" "".format(*row),
- file=config.stdout,
- )
-
-
- @project.command()
- @pass_config
- def list_services(config):
- """List project services."""
- data = [
- ["ID", "NAME", "HOSTNAME", "REGION_UUID"],
- [
- "-" * 80,
- "-" * 80,
- "-" * 80,
- "-" * 80,
- ],
- ]
- project = config.proximl.run(
- config.proximl.client.projects.get(config.proximl.client.project)
- )
-
- services = config.proximl.run(project.list_services())
-
- for service in services:
- data.append(
- [
- service.id,
- service.name,
- service.hostname,
- service.region_uuid,
- ]
- )
-
- for row in data:
- click.echo(
- "{: >38.36} {: >30.28} {: >30.28} {: >38.36}" "".format(*row),
- file=config.stdout,
- )
-
-
  from proximl.cli.project.secret import secret
  from proximl.cli.project.key import key
+ from proximl.cli.project.data_connector import data_connector
+ from proximl.cli.project.datastore import datastore
+ from proximl.cli.project.service import service
proximl/cli/project/data_connector.py ADDED
@@ -0,0 +1,61 @@
+ import click
+ import os
+ import json
+ import base64
+ from pathlib import Path
+ from proximl.cli import pass_config
+ from proximl.cli.project import project
+
+
+ @project.group()
+ @pass_config
+ def data_connector(config):
+ """proxiML project data_connector commands."""
+ pass
+
+
+ @data_connector.command()
+ @pass_config
+ def list(config):
+ """List project data_connectors."""
+ data = [
+ ["ID", "NAME", "TYPE", "REGION_UUID"],
+ [
+ "-" * 80,
+ "-" * 80,
+ "-" * 80,
+ "-" * 80,
+ ],
+ ]
+ project = config.proximl.run(
+ config.proximl.client.projects.get(config.proximl.client.project)
+ )
+
+ data_connectors = config.proximl.run(project.data_connectors.list())
+
+ for data_connector in data_connectors:
+ data.append(
+ [
+ data_connector.id,
+ data_connector.name,
+ data_connector.type,
+ data_connector.region_uuid,
+ ]
+ )
+
+ for row in data:
+ click.echo(
+ "{: >38.36} {: >30.28} {: >15.13} {: >38.36}" "".format(*row),
+ file=config.stdout,
+ )
+
+
+ @data_connector.command()
+ @pass_config
+ def refresh(config):
+ """
+ Refresh project data_connector list.
+ """
+ project = config.proximl.run(config.proximl.client.projects.get_current())
+
+ return config.proximl.run(project.data_connectors.refresh())
proximl/cli/project/datastore.py ADDED
@@ -0,0 +1,61 @@
+ import click
+ import os
+ import json
+ import base64
+ from pathlib import Path
+ from proximl.cli import pass_config
+ from proximl.cli.project import project
+
+
+ @project.group()
+ @pass_config
+ def datastore(config):
+ """proxiML project datastore commands."""
+ pass
+
+
+ @datastore.command()
+ @pass_config
+ def list(config):
+ """List project datastores."""
+ data = [
+ ["ID", "NAME", "TYPE", "REGION_UUID"],
+ [
+ "-" * 80,
+ "-" * 80,
+ "-" * 80,
+ "-" * 80,
+ ],
+ ]
+ project = config.proximl.run(
+ config.proximl.client.projects.get(config.proximl.client.project)
+ )
+
+ datastores = config.proximl.run(project.datastores.list())
+
+ for datastore in datastores:
+ data.append(
+ [
+ datastore.id,
+ datastore.name,
+ datastore.type,
+ datastore.region_uuid,
+ ]
+ )
+
+ for row in data:
+ click.echo(
+ "{: >38.36} {: >30.28} {: >15.13} {: >38.36}" "".format(*row),
+ file=config.stdout,
+ )
+
+
+ @datastore.command()
+ @pass_config
+ def refresh(config):
+ """
+ Refresh project datastore list.
+ """
+ project = config.proximl.run(config.proximl.client.projects.get_current())
+
+ return config.proximl.run(project.datastores.refresh())
proximl/cli/project/service.py ADDED
@@ -0,0 +1,61 @@
+ import click
+ import os
+ import json
+ import base64
+ from pathlib import Path
+ from proximl.cli import pass_config
+ from proximl.cli.project import project
+
+
+ @project.group()
+ @pass_config
+ def service(config):
+ """proxiML project service commands."""
+ pass
+
+
+ @service.command()
+ @pass_config
+ def list(config):
+ """List project services."""
+ data = [
+ ["ID", "NAME", "TYPE", "REGION_UUID"],
+ [
+ "-" * 80,
+ "-" * 80,
+ "-" * 80,
+ "-" * 80,
+ ],
+ ]
+ project = config.proximl.run(
+ config.proximl.client.projects.get(config.proximl.client.project)
+ )
+
+ services = config.proximl.run(project.services.list())
+
+ for service in services:
+ data.append(
+ [
+ service.id,
+ service.name,
+ service.hostname,
+ service.region_uuid,
+ ]
+ )
+
+ for row in data:
+ click.echo(
+ "{: >38.36} {: >30.28} {: >15.13} {: >38.36}" "".format(*row),
+ file=config.stdout,
+ )
+
+
+ @service.command()
+ @pass_config
+ def refresh(config):
+ """
+ Refresh project service list.
+ """
+ project = config.proximl.run(config.proximl.client.projects.get_current())
+
+ return config.proximl.run(project.services.refresh())
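These three new CLI files replace the removed `list_datastores`/`list_services` project commands with per-resource groups backed by collections on the Project object. A hedged SDK-level sketch of the same calls the CLI makes (run inside an async function with a configured client; the `proximl` variable is an assumption):

    # inside an async function, with `proximl = ProxiML()` already constructed
    project = await proximl.projects.get_current()
    datastores = await project.datastores.list()
    services = await project.services.list()
    connectors = await project.data_connectors.list()
    await project.data_connectors.refresh()  # re-sync the list, as the CLI refresh command does

On the CLI side these presumably surface as `proximl project datastore list`, `proximl project service list`, and `proximl project data_connector list`, assuming the console script is named `proximl`.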
proximl/cloudbender/cloudbender.py CHANGED
@@ -3,7 +3,8 @@ from .regions import Regions
  from .nodes import Nodes
  from .devices import Devices
  from .datastores import Datastores
- from .reservations import Reservations
+ from .data_connectors import DataConnectors
+ from .services import Services
  from .device_configs import DeviceConfigs
 
 
@@ -15,5 +16,6 @@ class Cloudbender(object):
  self.nodes = Nodes(proximl)
  self.devices = Devices(proximl)
  self.datastores = Datastores(proximl)
- self.reservations = Reservations(proximl)
+ self.data_connectors = DataConnectors(proximl)
+ self.services = Services(proximl)
  self.device_configs = DeviceConfigs(proximl)
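With `data_connectors` and `services` now attached to the Cloudbender client in place of `reservations`, the call the new service CLI wraps looks roughly like this (a sketch, not part of the diff; UUIDs and the name are placeholders):

    # inside an async function, with `proximl = ProxiML()` already constructed
    service = await proximl.cloudbender.services.create(
        provider_uuid="<provider-uuid>",  # placeholder
        region_uuid="<region-uuid>",      # placeholder
        name="inference-endpoint",        # hypothetical name
        type="https",                     # CLI choices: https, tcp, udp
        public=True,
    )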
proximl/cloudbender/data_connectors.py CHANGED
@@ -1,5 +1,13 @@
  import json
  import logging
+ import asyncio
+ import math
+
+ from proximl.exceptions import (
+ ApiError,
+ SpecificationError,
+ ProxiMLException,
+ )
 
 
  class DataConnectors(object):
proximl/cloudbender/datastores.py CHANGED
@@ -1,5 +1,13 @@
  import json
  import logging
+ import asyncio
+ import math
+
+ from proximl.exceptions import (
+ ApiError,
+ SpecificationError,
+ ProxiMLException,
+ )
 
 
  class Datastores(object):
@@ -20,9 +28,7 @@ class Datastores(object):
  "GET",
  kwargs,
  )
- datastores = [
- Datastore(self.proximl, **datastore) for datastore in resp
- ]
+ datastores = [Datastore(self.proximl, **datastore) for datastore in resp]
  return datastores
 
  async def create(
@@ -31,18 +37,12 @@ class Datastores(object):
  region_uuid,
  name,
  type,
- uri,
- root,
- options=None,
  **kwargs,
  ):
  logging.info(f"Creating Datastore {name}")
  data = dict(
  name=name,
  type=type,
- uri=uri,
- root=root,
- options=options,
  **kwargs,
  )
  payload = {k: v for k, v in data.items() if v is not None}
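With `uri`, `root`, and `options` dropped from the signature, a regional datastore is created from its name and type, with anything type-specific passed through `**kwargs`. A hedged sketch (the leading provider argument is not shown in this hunk and the "nfs" type is only an assumed example):

    # inside an async function, with `proximl = ProxiML()` already constructed
    datastore = await proximl.cloudbender.datastores.create(
        provider_uuid="<provider-uuid>",  # assumed leading argument, by analogy with services.create
        region_uuid="<region-uuid>",      # placeholder
        name="training-data",             # hypothetical name
        type="nfs",                       # assumed example type
    )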
@@ -73,8 +73,6 @@ class Datastore:
  self._region_uuid = self._datastore.get("region_uuid")
  self._type = self._datastore.get("type")
  self._name = self._datastore.get("name")
- self._uri = self._datastore.get("uri")
- self._root = self._datastore.get("root")
 
  @property
  def id(self) -> str:
@@ -96,14 +94,6 @@ class Datastore:
  def name(self) -> str:
  return self._name
 
- @property
- def uri(self) -> str:
- return self._uri
-
- @property
- def root(self) -> str:
- return self._root
-
  def __str__(self):
  return json.dumps({k: v for k, v in self._datastore.items()})
 
proximl/cloudbender/nodes.py CHANGED
@@ -1,5 +1,9 @@
  import json
  import logging
+ import asyncio
+ import math
+
+ from proximl.exceptions import ApiError, SpecificationError, ProxiMLException, NodeError
 
 
  class Nodes(object):
@@ -29,7 +33,7 @@ class Nodes(object):
  region_uuid,
  friendly_name,
  hostname,
- minion_id,
+ minion_id=None,
  type="permanent",
  service="compute",
  **kwargs,
@@ -153,3 +157,42 @@ class Node:
  None,
  dict(command=command),
  )
+
+ async def wait_for(self, status, timeout=300):
+ if self.status == status:
+ return
+ valid_statuses = ["active", "maintenance", "offline", "stopped", "archived"]
+ if not status in valid_statuses:
+ raise SpecificationError(
+ "status",
+ f"Invalid wait_for status {status}. Valid statuses are: {valid_statuses}",
+ )
+ MAX_TIMEOUT = 24 * 60 * 60
+ if timeout > MAX_TIMEOUT:
+ raise SpecificationError(
+ "timeout",
+ f"timeout must be less than {MAX_TIMEOUT} seconds.",
+ )
+
+ POLL_INTERVAL_MIN = 5
+ POLL_INTERVAL_MAX = 60
+ POLL_INTERVAL = max(min(timeout / 60, POLL_INTERVAL_MAX), POLL_INTERVAL_MIN)
+ retry_count = math.ceil(timeout / POLL_INTERVAL)
+ count = 0
+ while count < retry_count:
+ await asyncio.sleep(POLL_INTERVAL)
+ try:
+ await self.refresh()
+ except ApiError as e:
+ if status == "archived" and e.status == 404:
+ return
+ raise e
+ if self.status in ["errored", "failed"]:
+ raise NodeError(self.status, self)
+ if self.status == status:
+ return self
+ else:
+ count += 1
+ logging.debug(f"self: {self}, retry count {count}")
+
+ raise ProxiMLException(f"Timeout waiting for {status}")
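A short usage sketch for the new `Node.wait_for` (the node object is assumed to come from an earlier cloudbender nodes call; the timeout is illustrative):

    # `node` is a Node instance obtained elsewhere from the cloudbender nodes API
    await node.wait_for("active", timeout=1800)
    # timeout=1800 -> POLL_INTERVAL = max(min(30.0, 60), 5) = 30s, retry_count = 60
    # raises NodeError if the node reports "errored"/"failed",
    # ProxiMLException if the timeout elapses, and tolerates a 404 while waiting for "archived"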