proximl 0.5.6__tar.gz → 0.5.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (111)
  1. {proximl-0.5.6/proximl.egg-info → proximl-0.5.8}/PKG-INFO +1 -1
  2. {proximl-0.5.6 → proximl-0.5.8}/proximl/__init__.py +1 -1
  3. {proximl-0.5.6 → proximl-0.5.8}/proximl/checkpoints.py +33 -26
  4. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/cloudbender/__init__.py +1 -0
  5. proximl-0.5.8/proximl/cli/cloudbender/data_connector.py +159 -0
  6. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/cloudbender/service.py +19 -2
  7. {proximl-0.5.6 → proximl-0.5.8}/proximl/cloudbender/cloudbender.py +2 -0
  8. proximl-0.5.8/proximl/cloudbender/data_connectors.py +112 -0
  9. {proximl-0.5.6 → proximl-0.5.8}/proximl/cloudbender/services.py +65 -1
  10. {proximl-0.5.6 → proximl-0.5.8}/proximl/datasets.py +27 -9
  11. {proximl-0.5.6 → proximl-0.5.8}/proximl/jobs.py +13 -6
  12. {proximl-0.5.6 → proximl-0.5.8}/proximl/models.py +28 -20
  13. {proximl-0.5.6 → proximl-0.5.8}/proximl/projects.py +60 -8
  14. {proximl-0.5.6 → proximl-0.5.8}/proximl/volumes.py +9 -2
  15. {proximl-0.5.6 → proximl-0.5.8/proximl.egg-info}/PKG-INFO +1 -1
  16. {proximl-0.5.6 → proximl-0.5.8}/proximl.egg-info/SOURCES.txt +4 -1
  17. {proximl-0.5.6 → proximl-0.5.8}/pyproject.toml +1 -0
  18. {proximl-0.5.6 → proximl-0.5.8}/tests/integration/test_jobs_integration.py +13 -0
  19. proximl-0.5.8/tests/unit/cloudbender/test_data_connectors_unit.py +176 -0
  20. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cloudbender/test_services_unit.py +6 -0
  21. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/test_projects_unit.py +45 -5
  22. {proximl-0.5.6 → proximl-0.5.8}/LICENSE +0 -0
  23. {proximl-0.5.6 → proximl-0.5.8}/README.md +0 -0
  24. {proximl-0.5.6 → proximl-0.5.8}/examples/__init__.py +0 -0
  25. {proximl-0.5.6 → proximl-0.5.8}/examples/create_dataset_and_training_job.py +0 -0
  26. {proximl-0.5.6 → proximl-0.5.8}/examples/local_storage.py +0 -0
  27. {proximl-0.5.6 → proximl-0.5.8}/examples/training_inference_pipeline.py +0 -0
  28. {proximl-0.5.6 → proximl-0.5.8}/proximl/__main__.py +0 -0
  29. {proximl-0.5.6 → proximl-0.5.8}/proximl/auth.py +0 -0
  30. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/__init__.py +0 -0
  31. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/checkpoint.py +0 -0
  32. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/cloudbender/datastore.py +0 -0
  33. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/cloudbender/device.py +0 -0
  34. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/cloudbender/node.py +0 -0
  35. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/cloudbender/provider.py +0 -0
  36. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/cloudbender/region.py +0 -0
  37. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/connection.py +0 -0
  38. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/dataset.py +0 -0
  39. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/environment.py +0 -0
  40. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/gpu.py +0 -0
  41. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/job/__init__.py +0 -0
  42. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/job/create.py +0 -0
  43. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/model.py +0 -0
  44. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/project.py +0 -0
  45. {proximl-0.5.6 → proximl-0.5.8}/proximl/cli/volume.py +0 -0
  46. {proximl-0.5.6 → proximl-0.5.8}/proximl/cloudbender/__init__.py +0 -0
  47. {proximl-0.5.6 → proximl-0.5.8}/proximl/cloudbender/datastores.py +0 -0
  48. {proximl-0.5.6 → proximl-0.5.8}/proximl/cloudbender/device_configs.py +0 -0
  49. {proximl-0.5.6 → proximl-0.5.8}/proximl/cloudbender/devices.py +0 -0
  50. {proximl-0.5.6 → proximl-0.5.8}/proximl/cloudbender/nodes.py +0 -0
  51. {proximl-0.5.6 → proximl-0.5.8}/proximl/cloudbender/providers.py +0 -0
  52. {proximl-0.5.6 → proximl-0.5.8}/proximl/cloudbender/regions.py +0 -0
  53. {proximl-0.5.6 → proximl-0.5.8}/proximl/connections.py +0 -0
  54. {proximl-0.5.6 → proximl-0.5.8}/proximl/environments.py +0 -0
  55. {proximl-0.5.6 → proximl-0.5.8}/proximl/exceptions.py +0 -0
  56. {proximl-0.5.6 → proximl-0.5.8}/proximl/gpu_types.py +0 -0
  57. {proximl-0.5.6 → proximl-0.5.8}/proximl/proximl.py +0 -0
  58. {proximl-0.5.6 → proximl-0.5.8}/proximl.egg-info/dependency_links.txt +0 -0
  59. {proximl-0.5.6 → proximl-0.5.8}/proximl.egg-info/entry_points.txt +0 -0
  60. {proximl-0.5.6 → proximl-0.5.8}/proximl.egg-info/requires.txt +0 -0
  61. {proximl-0.5.6 → proximl-0.5.8}/proximl.egg-info/top_level.txt +0 -0
  62. {proximl-0.5.6 → proximl-0.5.8}/setup.cfg +0 -0
  63. {proximl-0.5.6 → proximl-0.5.8}/setup.py +0 -0
  64. {proximl-0.5.6 → proximl-0.5.8}/tests/integration/__init__.py +0 -0
  65. {proximl-0.5.6 → proximl-0.5.8}/tests/integration/cloudbender/__init__.py +0 -0
  66. {proximl-0.5.6 → proximl-0.5.8}/tests/integration/cloudbender/test_providers_integration.py +0 -0
  67. {proximl-0.5.6 → proximl-0.5.8}/tests/integration/conftest.py +0 -0
  68. {proximl-0.5.6 → proximl-0.5.8}/tests/integration/test_checkpoints_integration.py +0 -0
  69. {proximl-0.5.6 → proximl-0.5.8}/tests/integration/test_datasets_integration.py +0 -0
  70. {proximl-0.5.6 → proximl-0.5.8}/tests/integration/test_environments_integration.py +0 -0
  71. {proximl-0.5.6 → proximl-0.5.8}/tests/integration/test_gpu_types_integration.py +0 -0
  72. {proximl-0.5.6 → proximl-0.5.8}/tests/integration/test_models_integration.py +0 -0
  73. {proximl-0.5.6 → proximl-0.5.8}/tests/integration/test_projects_integration.py +0 -0
  74. {proximl-0.5.6 → proximl-0.5.8}/tests/integration/test_volumes_integration.py +0 -0
  75. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/__init__.py +0 -0
  76. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cli/__init__.py +0 -0
  77. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cli/cloudbender/__init__.py +0 -0
  78. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cli/cloudbender/test_cli_datastore_unit.py +0 -0
  79. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cli/cloudbender/test_cli_device_unit.py +0 -0
  80. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cli/cloudbender/test_cli_node_unit.py +0 -0
  81. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cli/cloudbender/test_cli_provider_unit.py +0 -0
  82. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cli/cloudbender/test_cli_region_unit.py +0 -0
  83. /proximl-0.5.6/tests/unit/cli/cloudbender/test_cli_reservation_unit.py → /proximl-0.5.8/tests/unit/cli/cloudbender/test_cli_service_unit.py +0 -0
  84. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cli/conftest.py +0 -0
  85. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cli/test_cli_checkpoint_unit.py +0 -0
  86. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cli/test_cli_datasets_unit.py +0 -0
  87. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cli/test_cli_environment_unit.py +0 -0
  88. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cli/test_cli_gpu_unit.py +0 -0
  89. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cli/test_cli_job_unit.py +0 -0
  90. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cli/test_cli_model_unit.py +0 -0
  91. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cli/test_cli_project_unit.py +0 -0
  92. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cli/test_cli_volume_unit.py +0 -0
  93. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cloudbender/__init__.py +0 -0
  94. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cloudbender/test_datastores_unit.py +0 -0
  95. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cloudbender/test_device_configs_unit.py +0 -0
  96. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cloudbender/test_devices_unit.py +0 -0
  97. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cloudbender/test_nodes_unit.py +0 -0
  98. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cloudbender/test_providers_unit.py +0 -0
  99. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/cloudbender/test_regions_unit.py +0 -0
  100. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/conftest.py +0 -0
  101. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/test_auth.py +0 -0
  102. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/test_checkpoints_unit.py +0 -0
  103. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/test_connections_unit.py +0 -0
  104. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/test_datasets_unit.py +0 -0
  105. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/test_environments_unit.py +0 -0
  106. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/test_exceptions.py +0 -0
  107. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/test_gpu_types_unit.py +0 -0
  108. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/test_jobs_unit.py +0 -0
  109. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/test_models_unit.py +0 -0
  110. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/test_proximl.py +0 -0
  111. {proximl-0.5.6 → proximl-0.5.8}/tests/unit/test_volumes_unit.py +0 -0
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: proximl
-Version: 0.5.6
+Version: 0.5.8
 Summary: proxiML client SDK and command line utilities
 Home-page: https://github.com/proxiML/python-sdk
 Author: proxiML
proximl/__init__.py

@@ -13,5 +13,5 @@ logging.basicConfig(
 logger = logging.getLogger(__name__)
 
 
-__version__ = "0.5.6"
+__version__ = "0.5.8"
 __all__ = "ProxiML"
proximl/checkpoints.py

@@ -23,9 +23,7 @@ class Checkpoints(object):
 
     async def list(self, **kwargs):
         resp = await self.proximl._query(f"/checkpoint", "GET", kwargs)
-        checkpoints = [
-            Checkpoint(self.proximl, **checkpoint) for checkpoint in resp
-        ]
+        checkpoints = [Checkpoint(self.proximl, **checkpoint) for checkpoint in resp]
         return checkpoints
 
     async def list_public(self, **kwargs):
@@ -39,8 +37,7 @@ class Checkpoints(object):
             source_type=source_type,
             source_uri=source_uri,
             source_options=kwargs.get("source_options"),
-            project_uuid=kwargs.get("project_uuid")
-            or self.proximl.active_project,
+            project_uuid=kwargs.get("project_uuid") or self.proximl.active_project,
         )
         payload = {k: v for k, v in data.items() if v is not None}
         logging.info(f"Creating Checkpoint {name}")
@@ -60,12 +57,13 @@ class Checkpoint:
     def __init__(self, proximl, **kwargs):
         self.proximl = proximl
         self._checkpoint = kwargs
-        self._id = self._checkpoint.get(
-            "id", self._checkpoint.get("checkpoint_uuid")
-        )
+        self._id = self._checkpoint.get("id", self._checkpoint.get("checkpoint_uuid"))
         self._status = self._checkpoint.get("status")
         self._name = self._checkpoint.get("name")
-        self._size = self._checkpoint.get("size")
+        self._size = self._checkpoint.get("size") or self._checkpoint.get("used_size")
+        self._billed_size = self._checkpoint.get("billed_size") or self._checkpoint.get(
+            "size"
+        )
         self._project_uuid = self._checkpoint.get("project_uuid")
 
     @property
@@ -84,6 +82,10 @@ class Checkpoint:
     def size(self) -> int:
         return self._size
 
+    @property
+    def billed_size(self) -> int:
+        return self._billed_size
+
     def __str__(self):
         return json.dumps({k: v for k, v in self._checkpoint.items()})
 
@@ -123,15 +125,17 @@ class Checkpoint:
                 entity_type="checkpoint",
                 project_uuid=self._checkpoint.get("project_uuid"),
                 cidr=self._checkpoint.get("vpn").get("cidr"),
-                ssh_port=self._checkpoint.get("vpn")
-                .get("client")
-                .get("ssh_port"),
-                input_path=self._checkpoint.get("source_uri")
-                if self.status in ["new", "downloading"]
-                else None,
-                output_path=self._checkpoint.get("output_uri")
-                if self.status == "exporting"
-                else None,
+                ssh_port=self._checkpoint.get("vpn").get("client").get("ssh_port"),
+                input_path=(
+                    self._checkpoint.get("source_uri")
+                    if self.status in ["new", "downloading"]
+                    else None
+                ),
+                output_path=(
+                    self._checkpoint.get("output_uri")
+                    if self.status == "exporting"
+                    else None
+                ),
             )
         else:
             details = dict()
@@ -195,9 +199,7 @@ class Checkpoint:
             if msg_handler:
                 msg_handler(data)
             else:
-                timestamp = datetime.fromtimestamp(
-                    int(data.get("time")) / 1000
-                )
+                timestamp = datetime.fromtimestamp(int(data.get("time")) / 1000)
                 print(
                     f"{timestamp.strftime('%m/%d/%Y, %H:%M:%S')}: {data.get('msg').rstrip()}"
                 )
@@ -224,19 +226,24 @@ class Checkpoint:
         return self
 
     async def wait_for(self, status, timeout=300):
+        if self.status == status:
+            return
         valid_statuses = ["downloading", "ready", "archived"]
         if not status in valid_statuses:
             raise SpecificationError(
                 "status",
                 f"Invalid wait_for status {status}. Valid statuses are: {valid_statuses}",
             )
-        if self.status == status:
-            return
+
+        MAX_TIMEOUT = 24 * 60 * 60
+        if timeout > MAX_TIMEOUT:
+            raise SpecificationError(
+                "timeout",
+                f"timeout must be less than {MAX_TIMEOUT} seconds.",
+            )
         POLL_INTERVAL_MIN = 5
         POLL_INTERVAL_MAX = 60
-        POLL_INTERVAL = max(
-            min(timeout / 60, POLL_INTERVAL_MAX), POLL_INTERVAL_MIN
-        )
+        POLL_INTERVAL = max(min(timeout / 60, POLL_INTERVAL_MAX), POLL_INTERVAL_MIN)
         retry_count = math.ceil(timeout / POLL_INTERVAL)
         count = 0
         while count < retry_count:
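
For context, a rough sketch of how the reworked Checkpoint surface might be used from the async SDK client. The ProxiML() constructor and the checkpoints accessor are assumed from the SDK's examples rather than shown in this diff; status, size, billed_size, and wait_for are the members changed above.

import asyncio
from proximl import ProxiML  # assumed client entry point, not part of this diff

async def main():
    client = ProxiML()
    # size now falls back to "used_size", and billed_size is new in 0.5.8
    checkpoints = await client.checkpoints.list()
    for checkpoint in checkpoints:
        print(checkpoint.status, checkpoint.size, checkpoint.billed_size)
    # wait_for() now returns immediately if the checkpoint is already in the
    # requested status, and timeouts above 24 hours raise SpecificationError
    if checkpoints:
        await checkpoints[0].wait_for("ready", timeout=600)

asyncio.run(main())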
proximl/cli/cloudbender/__init__.py

@@ -15,4 +15,5 @@ from proximl.cli.cloudbender.region import region
 from proximl.cli.cloudbender.node import node
 from proximl.cli.cloudbender.device import device
 from proximl.cli.cloudbender.datastore import datastore
+from proximl.cli.cloudbender.data_connector import data_connector
 from proximl.cli.cloudbender.service import service
proximl/cli/cloudbender/data_connector.py (new file)

@@ -0,0 +1,159 @@
+import click
+from proximl.cli import cli, pass_config, search_by_id_name
+from proximl.cli.cloudbender import cloudbender
+
+
+@cloudbender.group()
+@pass_config
+def data_connector(config):
+    """proxiML CloudBender data connector commands."""
+    pass
+
+
+@data_connector.command()
+@click.option(
+    "--provider",
+    "-p",
+    type=click.STRING,
+    required=True,
+    help="The provider ID of the region.",
+)
+@click.option(
+    "--region",
+    "-r",
+    type=click.STRING,
+    required=True,
+    help="The region ID to list data connectors for.",
+)
+@pass_config
+def list(config, provider, region):
+    """List data connectors."""
+    data = [
+        ["ID", "NAME", "TYPE"],
+        [
+            "-" * 80,
+            "-" * 80,
+            "-" * 80,
+        ],
+    ]
+
+    data_connectors = config.proximl.run(
+        config.proximl.client.cloudbender.data_connectors.list(
+            provider_uuid=provider, region_uuid=region
+        )
+    )
+
+    for data_connector in data_connectors:
+        data.append(
+            [
+                data_connector.id,
+                data_connector.name,
+                data_connector.type,
+            ]
+        )
+
+    for row in data:
+        click.echo(
+            "{: >37.36} {: >29.28} {: >9.8}" "".format(*row),
+            file=config.stdout,
+        )
+
+
+@data_connector.command()
+@click.option(
+    "--provider",
+    "-p",
+    type=click.STRING,
+    required=True,
+    help="The provider ID of the region.",
+)
+@click.option(
+    "--region",
+    "-r",
+    type=click.STRING,
+    required=True,
+    help="The region ID to create the data_connector in.",
+)
+@click.option(
+    "--type",
+    "-t",
+    type=click.Choice(
+        [
+            "custom",
+        ],
+        case_sensitive=False,
+    ),
+    required=True,
+    help="The type of data connector to create.",
+)
+@click.option(
+    "--protocol",
+    "-r",
+    type=click.STRING,
+    help="The transport protocol of the data connector",
+)
+@click.option(
+    "--port-range",
+    "-p",
+    type=click.STRING,
+    help="The port range of the data connector",
+)
+@click.option(
+    "--cidr",
+    "-i",
+    type=click.STRING,
+    help="The IP range to allow in CIDR notation",
+)
+@click.argument("name", type=click.STRING, required=True)
+@pass_config
+def create(config, provider, region, type, protocol, port_range, cidr, name):
+    """
+    Creates a data_connector.
+    """
+    return config.proximl.run(
+        config.proximl.client.cloudbender.data_connectors.create(
+            provider_uuid=provider,
+            region_uuid=region,
+            name=name,
+            type=type,
+            protocol=protocol,
+            port_range=port_range,
+            cidr=cidr,
+        )
+    )
+
+
+@data_connector.command()
+@click.option(
+    "--provider",
+    "-p",
+    type=click.STRING,
+    required=True,
+    help="The provider ID of the region.",
+)
+@click.option(
+    "--region",
+    "-r",
+    type=click.STRING,
+    required=True,
+    help="The region ID to remove the data_connector from.",
+)
+@click.argument("data_connector", type=click.STRING)
+@pass_config
+def remove(config, provider, region, data_connector):
+    """
+    Remove a data_connector.
+
+    DATASTORE may be specified by name or ID, but ID is preferred.
+    """
+    data_connectors = config.proximl.run(
+        config.proximl.client.cloudbender.data_connectors.list(
+            provider_uuid=provider, region_uuid=region
+        )
+    )
+
+    found = search_by_id_name(data_connector, data_connectors)
+    if None is found:
+        raise click.UsageError("Cannot find specified data_connector.")
+
+    return config.proximl.run(found.remove())
proximl/cli/cloudbender/service.py

@@ -74,6 +74,19 @@ def list(config, provider, region):
     required=True,
     help="The region ID to create the service in.",
 )
+@click.option(
+    "--type",
+    "-t",
+    type=click.Choice(
+        [
+            "https",
+            "tcp",
+            "udp",
+        ],
+    ),
+    required=True,
+    help="The type of regional service.",
+)
 @click.option(
     "--public/--no-public",
     default=True,
@@ -82,13 +95,17 @@ def list(config, provider, region):
 )
 @click.argument("name", type=click.STRING, required=True)
 @pass_config
-def create(config, provider, region, public, name):
+def create(config, provider, region, type, public, name):
     """
     Creates a service.
     """
     return config.proximl.run(
         config.proximl.client.cloudbender.services.create(
-            provider_uuid=provider, region_uuid=region, name=name, public=public
+            provider_uuid=provider,
+            region_uuid=region,
+            name=name,
+            type=type,
+            public=public,
         )
     )
 
proximl/cloudbender/cloudbender.py

@@ -3,6 +3,7 @@ from .regions import Regions
 from .nodes import Nodes
 from .devices import Devices
 from .datastores import Datastores
+from .data_connectors import DataConnectors
 from .services import Services
 from .device_configs import DeviceConfigs
 
@@ -15,5 +16,6 @@ class Cloudbender(object):
         self.nodes = Nodes(proximl)
         self.devices = Devices(proximl)
         self.datastores = Datastores(proximl)
+        self.data_connectors = DataConnectors(proximl)
         self.services = Services(proximl)
         self.device_configs = DeviceConfigs(proximl)
proximl/cloudbender/data_connectors.py (new file)

@@ -0,0 +1,112 @@
+import json
+import logging
+
+
+class DataConnectors(object):
+    def __init__(self, proximl):
+        self.proximl = proximl
+
+    async def get(self, provider_uuid, region_uuid, id, **kwargs):
+        resp = await self.proximl._query(
+            f"/provider/{provider_uuid}/region/{region_uuid}/data_connector/{id}",
+            "GET",
+            kwargs,
+        )
+        return DataConnector(self.proximl, **resp)
+
+    async def list(self, provider_uuid, region_uuid, **kwargs):
+        resp = await self.proximl._query(
+            f"/provider/{provider_uuid}/region/{region_uuid}/data_connector",
+            "GET",
+            kwargs,
+        )
+        data_connectors = [
+            DataConnector(self.proximl, **data_connector) for data_connector in resp
+        ]
+        return data_connectors
+
+    async def create(
+        self,
+        provider_uuid,
+        region_uuid,
+        name,
+        type,
+        **kwargs,
+    ):
+        logging.info(f"Creating Data Connector {name}")
+        data = dict(
+            name=name,
+            type=type,
+            **kwargs,
+        )
+        payload = {k: v for k, v in data.items() if v is not None}
+        resp = await self.proximl._query(
+            f"/provider/{provider_uuid}/region/{region_uuid}/data_connector",
+            "POST",
+            None,
+            payload,
+        )
+        data_connector = DataConnector(self.proximl, **resp)
+        logging.info(f"Created Data Connector {name} with id {data_connector.id}")
+        return data_connector
+
+    async def remove(self, provider_uuid, region_uuid, id, **kwargs):
+        await self.proximl._query(
+            f"/provider/{provider_uuid}/region/{region_uuid}/data_connector/{id}",
+            "DELETE",
+            kwargs,
+        )
+
+
+class DataConnector:
+    def __init__(self, proximl, **kwargs):
+        self.proximl = proximl
+        self._data_connector = kwargs
+        self._id = self._data_connector.get("connector_id")
+        self._provider_uuid = self._data_connector.get("provider_uuid")
+        self._region_uuid = self._data_connector.get("region_uuid")
+        self._type = self._data_connector.get("type")
+        self._name = self._data_connector.get("name")
+
+    @property
+    def id(self) -> str:
+        return self._id
+
+    @property
+    def provider_uuid(self) -> str:
+        return self._provider_uuid
+
+    @property
+    def region_uuid(self) -> str:
+        return self._region_uuid
+
+    @property
+    def type(self) -> str:
+        return self._type
+
+    @property
+    def name(self) -> str:
+        return self._name
+
+    def __str__(self):
+        return json.dumps({k: v for k, v in self._data_connector.items()})
+
+    def __repr__(self):
+        return f"DataConnector( proximl , **{self._data_connector.__repr__()})"
+
+    def __bool__(self):
+        return bool(self._id)
+
+    async def remove(self):
+        await self.proximl._query(
+            f"/provider/{self._provider_uuid}/region/{self._region_uuid}/data_connector/{self._id}",
+            "DELETE",
+        )
+
+    async def refresh(self):
+        resp = await self.proximl._query(
+            f"/provider/{self._provider_uuid}/region/{self._region_uuid}/data_connector/{self._id}",
+            "GET",
+        )
+        self.__init__(self.proximl, **resp)
+        return self
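
A rough usage sketch for the new DataConnectors collection, driven through the cloudbender attribute registered in cloudbender.py above. The ProxiML() constructor, the provider/region IDs, and the option values are illustrative assumptions, not taken from this diff.

import asyncio
from proximl import ProxiML  # assumed client entry point, not part of this diff

async def main():
    client = ProxiML()
    # enumerate the connectors defined in a CloudBender region (IDs are placeholders)
    connectors = await client.cloudbender.data_connectors.list(
        provider_uuid="<provider-id>", region_uuid="<region-id>"
    )
    for connector in connectors:
        print(connector.id, connector.name, connector.type)

    # create a "custom" connector (protocol/port_range/cidr are optional kwargs
    # forwarded to the API), then remove it again
    connector = await client.cloudbender.data_connectors.create(
        provider_uuid="<provider-id>",
        region_uuid="<region-id>",
        name="example-connector",
        type="custom",
        protocol="tcp",
        port_range="8000-8010",
        cidr="10.0.0.0/24",
    )
    await connector.remove()

asyncio.run(main())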
proximl/cloudbender/services.py

@@ -1,5 +1,13 @@
 import json
 import logging
+import asyncio
+import math
+
+from proximl.exceptions import (
+    ApiError,
+    SpecificationError,
+    ProxiMLException,
+)
 
 
 class Services(object):
@@ -28,12 +36,14 @@ class Services(object):
         provider_uuid,
         region_uuid,
         name,
+        type,
         public,
         **kwargs,
     ):
         logging.info(f"Creating Service {name}")
         data = dict(
             name=name,
+            type=type,
             public=public,
             **kwargs,
         )
@@ -65,7 +75,12 @@ class Service:
         self._region_uuid = self._service.get("region_uuid")
         self._public = self._service.get("public")
         self._name = self._service.get("name")
-        self._hostname = self._service.get("hostname")
+        self._type = self._service.get("type")
+        self._hostname = self._service.get("custom_hostname") or self._service.get(
+            "hostname"
+        )
+        self._status = self._service.get("status")
+        self._port = self._service.get("port")
 
     @property
     def id(self) -> str:
@@ -91,6 +106,18 @@ class Service:
     def hostname(self) -> str:
         return self._hostname
 
+    @property
+    def status(self) -> str:
+        return self._status
+
+    @property
+    def type(self) -> str:
+        return self._type
+
+    @property
+    def port(self) -> str:
+        return self._port
+
     def __str__(self):
         return json.dumps({k: v for k, v in self._service.items()})
 
@@ -113,3 +140,40 @@ class Service:
         )
         self.__init__(self.proximl, **resp)
         return self
+
+    async def wait_for(self, status, timeout=300):
+        if self.status == status:
+            return
+        valid_statuses = ["active", "archived"]
+        if not status in valid_statuses:
+            raise SpecificationError(
+                "status",
+                f"Invalid wait_for status {status}. Valid statuses are: {valid_statuses}",
+            )
+        MAX_TIMEOUT = 24 * 60 * 60
+        if timeout > MAX_TIMEOUT:
+            raise SpecificationError(
+                "timeout",
+                f"timeout must be less than {MAX_TIMEOUT} seconds.",
+            )
+
+        POLL_INTERVAL_MIN = 5
+        POLL_INTERVAL_MAX = 60
+        POLL_INTERVAL = max(min(timeout / 60, POLL_INTERVAL_MAX), POLL_INTERVAL_MIN)
+        retry_count = math.ceil(timeout / POLL_INTERVAL)
+        count = 0
+        while count < retry_count:
+            await asyncio.sleep(POLL_INTERVAL)
+            try:
+                await self.refresh()
+            except ApiError as e:
+                if status == "archived" and e.status == 404:
+                    return
+                raise e
+            if self.status == status:
+                return self
+            else:
+                count += 1
+                logging.debug(f"self: {self}, retry count {count}")
+
+        raise ProxiMLException(f"Timeout waiting for {status}")
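
A similar sketch for the Service changes: create now takes a required type, and the new wait_for polls until the service reaches the requested status. The client construction and IDs are placeholders, and the assumption that create returns a Service object follows the pattern of the other collections.

import asyncio
from proximl import ProxiML  # assumed client entry point, not part of this diff

async def main():
    client = ProxiML()
    service = await client.cloudbender.services.create(
        provider_uuid="<provider-id>",
        region_uuid="<region-id>",
        name="example-service",
        type="https",  # new required argument: https, tcp, or udp
        public=True,
    )
    # new in 0.5.8: poll until the regional service is "active" (or "archived"),
    # with the same 24-hour timeout cap as the other wait_for implementations
    await service.wait_for("active", timeout=600)
    print(service.hostname, service.port, service.status)

asyncio.run(main())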
proximl/datasets.py

@@ -60,7 +60,10 @@ class Dataset:
         self._id = self._dataset.get("id", self._dataset.get("dataset_uuid"))
         self._status = self._dataset.get("status")
         self._name = self._dataset.get("name")
-        self._size = self._dataset.get("size")
+        self._size = self._dataset.get("size") or self._dataset.get("used_size")
+        self._billed_size = self._dataset.get("billed_size") or self._dataset.get(
+            "size"
+        )
         self._project_uuid = self._dataset.get("project_uuid")
 
     @property
@@ -79,6 +82,10 @@ class Dataset:
     def size(self) -> int:
         return self._size or 0
 
+    @property
+    def billed_size(self) -> int:
+        return self._billed_size
+
     def __str__(self):
         return json.dumps({k: v for k, v in self._dataset.items()})
 
@@ -119,12 +126,16 @@ class Dataset:
                 project_uuid=self._dataset.get("project_uuid"),
                 cidr=self._dataset.get("vpn").get("cidr"),
                 ssh_port=self._dataset.get("vpn").get("client").get("ssh_port"),
-                input_path=self._dataset.get("source_uri")
-                if self.status in ["new", "downloading"]
-                else None,
-                output_path=self._dataset.get("output_uri")
-                if self.status == "exporting"
-                else None,
+                input_path=(
+                    self._dataset.get("source_uri")
+                    if self.status in ["new", "downloading"]
+                    else None
+                ),
+                output_path=(
+                    self._dataset.get("output_uri")
+                    if self.status == "exporting"
+                    else None
+                ),
             )
         else:
             details = dict()
@@ -215,14 +226,21 @@ class Dataset:
         return self
 
     async def wait_for(self, status, timeout=300):
+        if self.status == status:
+            return
         valid_statuses = ["downloading", "ready", "archived"]
         if not status in valid_statuses:
             raise SpecificationError(
                 "status",
                 f"Invalid wait_for status {status}. Valid statuses are: {valid_statuses}",
             )
-        if self.status == status:
-            return
+        MAX_TIMEOUT = 24 * 60 * 60
+        if timeout > MAX_TIMEOUT:
+            raise SpecificationError(
+                "timeout",
+                f"timeout must be less than {MAX_TIMEOUT} seconds.",
+            )
+
         POLL_INTERVAL_MIN = 5
         POLL_INTERVAL_MAX = 60
         POLL_INTERVAL = max(min(timeout / 60, POLL_INTERVAL_MAX), POLL_INTERVAL_MIN)
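
The Dataset size handling mirrors the Checkpoint change. A minimal illustration of the new fallbacks using the constructor directly: the raw field values are made up, and None stands in for the client since these properties never touch it.

from proximl.datasets import Dataset

# "size" falls back to "used_size"; "billed_size" falls back to "size"
dataset = Dataset(
    None,
    dataset_uuid="example-uuid",
    status="ready",
    name="example",
    used_size=1024,
    billed_size=2048,
)
print(dataset.size)         # 1024, taken from used_size
print(dataset.billed_size)  # 2048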
proximl/jobs.py

@@ -468,6 +468,12 @@ class Job:
         return job
 
     async def wait_for(self, status, timeout=300):
+        if self.status == status or (
+            self.type == "training"
+            and status == "finished"
+            and self.status == "stopped"
+        ):
+            return
         valid_statuses = [
             "waiting for data/model download",
             "waiting for GPUs",
@@ -492,12 +498,13 @@ class Job:
                 "'stopped' status is deprecated for training jobs, use 'finished' instead.",
                 DeprecationWarning,
             )
-        if self.status == status or (
-            self.type == "training"
-            and status == "finished"
-            and self.status == "stopped"
-        ):
-            return
+
+        MAX_TIMEOUT = 24 * 60 * 60
+        if timeout > MAX_TIMEOUT:
+            raise SpecificationError(
+                "timeout",
+                f"timeout must be less than {MAX_TIMEOUT} seconds.",
+            )
 
         POLL_INTERVAL_MIN = 5
         POLL_INTERVAL_MAX = 60