dtlpy 1.107.8__py3-none-any.whl → 1.109.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. dtlpy/__init__.py +1 -7
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/entities/__init__.py +5 -4
  4. dtlpy/entities/annotation.py +28 -57
  5. dtlpy/entities/annotation_definitions/base_annotation_definition.py +6 -14
  6. dtlpy/entities/app.py +1 -1
  7. dtlpy/entities/command.py +10 -7
  8. dtlpy/entities/compute.py +77 -94
  9. dtlpy/entities/dataset.py +29 -14
  10. dtlpy/entities/dpk.py +1 -0
  11. dtlpy/entities/filters.py +7 -6
  12. dtlpy/entities/item.py +7 -14
  13. dtlpy/entities/node.py +0 -12
  14. dtlpy/entities/service.py +0 -9
  15. dtlpy/entities/service_driver.py +118 -0
  16. dtlpy/entities/trigger.py +1 -1
  17. dtlpy/new_instance.py +1 -1
  18. dtlpy/repositories/__init__.py +2 -1
  19. dtlpy/repositories/apps.py +8 -4
  20. dtlpy/repositories/collections.py +86 -34
  21. dtlpy/repositories/commands.py +14 -4
  22. dtlpy/repositories/computes.py +173 -127
  23. dtlpy/repositories/datasets.py +20 -9
  24. dtlpy/repositories/downloader.py +20 -8
  25. dtlpy/repositories/dpks.py +26 -1
  26. dtlpy/repositories/items.py +5 -2
  27. dtlpy/repositories/service_drivers.py +213 -0
  28. dtlpy/repositories/services.py +6 -0
  29. dtlpy/repositories/uploader.py +4 -0
  30. dtlpy-1.109.19.dist-info/METADATA +172 -0
  31. {dtlpy-1.107.8.dist-info → dtlpy-1.109.19.dist-info}/RECORD +39 -37
  32. tests/features/environment.py +16 -15
  33. dtlpy-1.107.8.dist-info/METADATA +0 -69
  34. {dtlpy-1.107.8.data → dtlpy-1.109.19.data}/scripts/dlp +0 -0
  35. {dtlpy-1.107.8.data → dtlpy-1.109.19.data}/scripts/dlp.bat +0 -0
  36. {dtlpy-1.107.8.data → dtlpy-1.109.19.data}/scripts/dlp.py +0 -0
  37. {dtlpy-1.107.8.dist-info → dtlpy-1.109.19.dist-info}/LICENSE +0 -0
  38. {dtlpy-1.107.8.dist-info → dtlpy-1.109.19.dist-info}/WHEEL +0 -0
  39. {dtlpy-1.107.8.dist-info → dtlpy-1.109.19.dist-info}/entry_points.txt +0 -0
  40. {dtlpy-1.107.8.dist-info → dtlpy-1.109.19.dist-info}/top_level.txt +0 -0
dtlpy/__init__.py CHANGED
@@ -77,7 +77,7 @@ from .entities import (
     # triggers
     TriggerResource, TriggerAction, TriggerExecutionMode, TriggerType,
     # faas
-    FunctionIO, KubernetesAutoscalerType, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
+    FunctionIO, KubernetesAutoscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
     InstanceCatalog, PackageInputType, ServiceType, ServiceModeType, KubernetesRPSAutoscaler,
     PackageSlot, SlotPostAction, SlotPostActionType, SlotDisplayScope, SlotDisplayScopeResource, UiBindingPanel,
     # roberto
@@ -232,12 +232,6 @@ def checkout_state():
     state = client_api.state_io.read_json()
     return state

-
-def use_attributes_2(state: bool = True):
-    warnings.warn("Function 'use_attributes_2()' is deprecated as of version 1.99.12 and has been non-functional since version 1.90.39. To work with attributes 2.0, simply use 'update_attributes()'.", DeprecationWarning)
-    client_api.attributes_mode.use_attributes_2 = state
-
-
 class LoggingLevel:
     DEBUG = "debug"
     WARNING = "warning"
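The long-deprecated `use_attributes_2()` helper is removed from the top-level namespace. A minimal sketch of the practical impact, assuming the usual `import dtlpy as dl` setup:

```python
import dtlpy as dl

# The helper was removed from dtlpy/__init__.py in 1.109.19, so calls that
# previously only emitted a DeprecationWarning now fail with AttributeError.
try:
    dl.use_attributes_2(True)
except AttributeError:
    pass

# Attribute 2.0 values are plain dictionaries assigned directly to an
# annotation (see the annotation.py changes below).
```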
dtlpy/__version__.py CHANGED
@@ -1 +1 @@
-version = '1.107.8'
+version = '1.109.19'
dtlpy/entities/__init__.py CHANGED
@@ -43,7 +43,7 @@ from .package_slot import PackageSlot, SlotPostAction, SlotPostActionType, SlotD
     UiBindingPanel
 from .package_function import PackageFunction, FunctionIO, PackageInputType
 from .time_series import TimeSeries
-from .service import Service, KubernetesAutoscalerType, KubernetesAutuscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRPSAutoscaler, \
+from .service import Service, KubernetesAutoscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRPSAutoscaler, \
     InstanceCatalog, KubernetesRuntime, ServiceType, ServiceModeType
 from .execution import Execution, ExecutionStatus
 from .command import Command, CommandsStatus
@@ -75,8 +75,9 @@ from .app_module import AppModule
 from .resource_execution import ResourceExecution
 from .message import Message, NotificationEventContext
 from .prompt_item import Prompt, PromptItem, PromptType
-from .compute import ClusterProvider, ComputeType, ComputeStatus, Toleration, DeploymentResource, DeploymentResources, \
-    NodePool, AuthenticationIntegration, Authentication, ComputeCluster, ComputeContext, Compute, KubernetesCompute, \
-    ServiceDriver
+from .compute import ClusterProvider, ComputeType, ComputeStatus, Toleration, DeploymentResource, DeploymentResources, ComputeSettings, ComputeConsumptionMethod, \
+    NodePool, AuthenticationIntegration, Authentication, ComputeCluster, ComputeContext, Compute, KubernetesCompute
+from .service_driver import ServiceDriver
 from .gis_item import ItemGis, Layer
 from .collection import Collection
+
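`ServiceDriver` now lives in the new `dtlpy/entities/service_driver.py` module but is still re-exported from `dtlpy.entities`. A short sketch of both import paths under that assumption:

```python
import dtlpy as dl
from dtlpy.entities.service_driver import ServiceDriver  # new module location

# The entity is still re-exported from the entities package, so code that
# referenced it through dl.entities should keep working unchanged.
assert dl.entities.ServiceDriver is ServiceDriver
```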
dtlpy/entities/annotation.py CHANGED
@@ -45,6 +45,8 @@ class AnnotationType(str, Enum):
     SUBTITLE = "subtitle"
     TEXT = "text_mark"
     GIS = "gis"
+    SEMANTIC_3D = "ref_semantic_3d"
+    POLYLINE_3D = "polyline_3d"


 class ViewAnnotationOptions(str, Enum):
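The two new members map to the raw platform type strings. A hedged sketch of how they might be used in an annotation filter (the filter fields are illustrative, not taken from this diff):

```python
import dtlpy as dl

# New 3D annotation types added in 1.109.x; being str enums, they compare
# equal to the underlying platform strings.
assert dl.AnnotationType.SEMANTIC_3D == "ref_semantic_3d"
assert dl.AnnotationType.POLYLINE_3D == "polyline_3d"

# Illustrative annotation filter on the new type.
filters = dl.Filters(resource=dl.FiltersResource.ANNOTATION)
filters.add(field='type', values=dl.AnnotationType.POLYLINE_3D)
```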
@@ -130,9 +132,7 @@ class Annotation(entities.BaseEntity):
     _annotations = attr.ib(repr=False, default=None)
     __client_api = attr.ib(default=None, repr=False)
     _items = attr.ib(repr=False, default=None)
-
-    # temp
-    _recipe_2_attributes = attr.ib(repr=False, default=None)
+    _recipe_1_attributes = attr.ib(repr=False, default=None)

     ############
     # Platform #
@@ -422,28 +422,11 @@ class Annotation(entities.BaseEntity):

     @property
     def attributes(self):
-        if self._recipe_2_attributes is not None or self.annotation_definition.attributes == []:
-            return self._recipe_2_attributes
         return self.annotation_definition.attributes

     @attributes.setter
     def attributes(self, attributes):
-        if isinstance(attributes, dict):
-            self._recipe_2_attributes = attributes
-        elif isinstance(attributes, list):
-            warnings.warn("List attributes are deprecated and will be removed in version 1.109. Use Attribute 2.0 (Dictionary) instead. "
-                          "For more details, refer to the documentation: "
-                          "https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_annotations/chapter/#set-attributes-on-annotations",
-                          DeprecationWarning,
-                          )
-            self.annotation_definition.attributes = attributes
-        elif attributes is None:
-            if self._recipe_2_attributes:
-                self._recipe_2_attributes = {}
-            if self.annotation_definition.attributes:
-                self.annotation_definition.attributes = []
-        else:
-            raise ValueError('Attributes must be a dictionary or a list')
+        self.annotation_definition.attributes = attributes

     @property
     def color(self):
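With the setter reduced to a pass-through, dictionary (Attribute 2.0) values are written straight onto the annotation definition, and list values now fail fast there. A minimal sketch, assuming a locally created annotation:

```python
import dtlpy as dl

# Attribute 2.0 values are plain dictionaries and are forwarded directly to
# the annotation definition.
annotation = dl.Annotation.new(
    annotation_definition=dl.Box(top=10, left=10, bottom=100, right=100, label='car'))
annotation.attributes = {'visibility': 'occluded'}

# Legacy list attributes are no longer warned about and forwarded; the
# definition's setter raises TypeError instead (see base_annotation_definition.py).
try:
    annotation.attributes = ['occluded']
except TypeError:
    pass
```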
@@ -1367,9 +1350,9 @@ class Annotation(entities.BaseEntity):
             status = _json['metadata']['system'].get('status', status)

         named_attributes = metadata.get('system', dict()).get('attributes', None)
-        attributes = named_attributes if named_attributes else _json.get('attributes', None)
+        recipe_1_attributes = _json.get('attributes', None)

-        first_frame_attributes = attributes
+        first_frame_attributes = recipe_1_attributes
         first_frame_coordinates = list()
         first_frame_number = 0
         first_frame_start_time = 0
@@ -1425,7 +1408,7 @@ class Annotation(entities.BaseEntity):
             def_dict = {'type': _json['type'],
                         'coordinates': coordinates,
                         'label': _json['label'],
-                        'attributes': attributes}
+                        'attributes': named_attributes}
             annotation_definition = FrameAnnotation.json_to_annotation_definition(def_dict)

             frames = entities.ReflectDict(
@@ -1470,9 +1453,9 @@ class Annotation(entities.BaseEntity):
             start_frame=start_frame,
             annotations=annotations,
             start_time=start_time,
-            recipe_2_attributes=named_attributes,
             label_suggestions=_json.get('labelSuggestions', None),
-            source=_json.get('source', None)
+            source=_json.get('source', None),
+            recipe_1_attributes=recipe_1_attributes,
         )
         annotation.annotation_definition = annotation_definition
         annotation.__client_api = client_api
@@ -1598,15 +1581,9 @@ class Annotation(entities.BaseEntity):
         if isinstance(self.annotation_definition, entities.Description):
             _json['metadata']['system']['system'] = True

-        if self._recipe_2_attributes is not None:
-            _json['metadata']['system']['attributes'] = self._recipe_2_attributes
-            if 'attributes' in self._platform_dict:
-                _json['attributes'] = self._platform_dict['attributes']
-        else:
-            _json['attributes'] = self.attributes
-            orig_metadata_system = self._platform_dict.get('metadata', {}).get('system', {})
-            if 'attributes' in orig_metadata_system:
-                _json['metadata']['system']['attributes'] = orig_metadata_system['attributes']
+        _json['metadata']['system']['attributes'] = self.attributes if self.attributes is not None else dict()
+        _json['attributes'] = self._recipe_1_attributes
+

         # add frame info
         if self.is_video or (self.end_time and self.end_time > 0) or (self.end_frame and self.end_frame > 0):
@@ -1643,7 +1620,7 @@ class Annotation(entities.BaseEntity):
         :return: page of scores
         """
         return self.annotations.task_scores(annotation_id=self.id ,task_id=task_id, page_offset=page_offset, page_size=page_size)
-
+


 @attr.s
@@ -1663,8 +1640,8 @@ class FrameAnnotation(entities.BaseEntity):
     object_visible = attr.ib()

     # temp
-    _recipe_2_attributes = attr.ib(repr=False, default=None)
     _interpolation = attr.ib(repr=False, default=False)
+    _recipe_1_attributes = attr.ib(repr=False, default=None)

     ################################
     # parent annotation attributes #
@@ -1696,23 +1673,11 @@ class FrameAnnotation(entities.BaseEntity):

     @property
     def attributes(self):
-        if self._recipe_2_attributes or self.annotation_definition.attributes == []:
-            return self._recipe_2_attributes
         return self.annotation_definition.attributes

     @attributes.setter
     def attributes(self, attributes):
-        if isinstance(attributes, dict):
-            self._recipe_2_attributes = attributes
-        elif isinstance(attributes, list):
-            warnings.warn("List attributes are deprecated and will be removed in version 1.109. Use Attribute 2.0 (Dictionary) instead. "
-                          "For more details, refer to the documentation: "
-                          "https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_annotations/chapter/#set-attributes-on-annotations",
-                          DeprecationWarning,
-                          )
-            self.annotation_definition.attributes = attributes
-        else:
-            raise ValueError('Attributes must be a dictionary or a list')
+        self.annotation_definition.attributes = attributes

     @property
     def geo(self):
@@ -1793,6 +1758,11 @@ class FrameAnnotation(entities.BaseEntity):

     @staticmethod
     def json_to_annotation_definition(_json):
+        if 'namedAttributes' in _json:
+            _json['attributes'] = _json['namedAttributes']
+        else:
+            if not isinstance(_json.get('attributes'), dict):
+                _json['attributes'] = None
         if _json['type'] == 'segment':
             annotation = entities.Polygon.from_json(_json)
         elif _json['type'] == 'polyline':
@@ -1866,6 +1836,7 @@ class FrameAnnotation(entities.BaseEntity):
         """
         # get annotation class
         _json['type'] = annotation.type
+        attrs = _json.get('attributes', None)
        annotation_definition = cls.json_to_annotation_definition(_json=_json)

         frame_num = _json.get('frame', annotation.last_frame + 1)
@@ -1879,9 +1850,7 @@ class FrameAnnotation(entities.BaseEntity):
             frame_num=frame_num,
             fixed=_json.get('fixed', False),
             object_visible=_json.get('objectVisible', True),
-
-            # temp
-            recipe_2_attributes=_json.get('namedAttributes', None)
+            recipe_1_attributes=attrs,
         )

     def to_snapshot(self):
@@ -1898,9 +1867,11 @@ class FrameAnnotation(entities.BaseEntity):
         if self.annotation_definition.description is not None:
             snapshot_dict['description'] = self.annotation_definition.description

-        if self.annotation._recipe_2_attributes:
-            snapshot_dict['namedAttributes'] = self._recipe_2_attributes
-        else:
-            snapshot_dict['attributes'] = self.attributes
+        if self.attributes is not None:
+            snapshot_dict['namedAttributes'] = self.attributes
+
+        if self._recipe_1_attributes is not None:
+            snapshot_dict['attributes'] = self._recipe_1_attributes
+

         return snapshot_dict
dtlpy/entities/annotation_definitions/base_annotation_definition.py CHANGED
@@ -13,14 +13,9 @@ class BaseAnnotationDefinition:
         self._bottom = 0
         self._right = 0
         self._annotation = None
-
-        if isinstance(attributes, list) and len(attributes) > 0:
-            warnings.warn("List attributes are deprecated and will be removed in version 1.109. Use Attribute 2.0 (Dictionary) instead."
-                          "For more details, refer to the documentation: "
-                          "https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_annotations/chapter/#set-attributes-on-annotations",
-                          DeprecationWarning,
-                          )
-        self._attributes = attributes
+        if attributes and not isinstance(attributes, dict):
+            raise TypeError('attributes should be a dictionary')
+        self._attributes = attributes or {}

     @property
     def attributes(self):
@@ -28,13 +23,10 @@ class BaseAnnotationDefinition:

     @attributes.setter
     def attributes(self, v):
-        if isinstance(v, list):
-            warnings.warn("List attributes are deprecated and will be removed in version 1.109. Use Attribute 2.0 (Dictionary) instead. "
-                          "For more details, refer to the documentation: "
-                          "https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_annotations/chapter/#set-attributes-on-annotations",
-                          DeprecationWarning,
-                          )
+        if v and not isinstance(v, dict):
+            raise TypeError('attributes should be a dictionary')
         self._attributes = v
+
     @property
     def top(self):
         return self._top
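In practice this turns the old deprecation warning into a hard error at construction time as well. A brief sketch, assuming the standard `dl.Box` definition (the field values are illustrative):

```python
import dtlpy as dl

# Dictionary attributes are accepted and stored as-is on the definition.
box = dl.Box(top=0, left=0, bottom=50, right=50, label='person',
             attributes={'pose': 'standing'})

# Legacy list attributes now raise TypeError instead of a DeprecationWarning.
try:
    dl.Box(top=0, left=0, bottom=50, right=50, label='person',
           attributes=['standing'])
except TypeError:
    pass
```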
dtlpy/entities/app.py CHANGED
@@ -60,7 +60,7 @@ class App(entities.BaseEntity):
         reps = namedtuple('repositories', field_names=['projects', 'apps', 'compositions'])
         return reps(
             projects=repositories.Projects(client_api=self._client_api),
-            apps=repositories.Apps(client_api=self._client_api, project=self._project),
+            apps=repositories.Apps(client_api=self._client_api, project=self._project, project_id=self.project_id),
             compositions=repositories.Compositions(client_api=self._client_api, project=self._project)
         )

dtlpy/entities/command.py CHANGED
@@ -146,21 +146,24 @@ class Command(entities.BaseEntity):
             entities.CommandsStatus.ABORTED
         ]

-    def wait(self, timeout=0, step=None, backoff_factor=1):
+    def wait(self, timeout=0, step=None, backoff_factor=1, iteration_callback=None):
         """
         Wait for Command to finish

         :param int timeout: int, seconds to wait until TimeoutError is raised. if 0 - wait until done
         :param int step: int, seconds between polling
         :param float backoff_factor: A backoff factor to apply between attempts after the second try
+        :param function iteration_callback: function to call on each iteration
         :return: Command object
         """
         if not self.in_progress():
             return self

-        return self.commands.wait(command_id=self.id,
-                                  timeout=timeout,
-                                  step=step,
-                                  url=self.url,
-                                  backoff_factor=backoff_factor
-                                  )
+        return self.commands.wait(
+            command_id=self.id,
+            timeout=timeout,
+            step=step,
+            url=self.url,
+            backoff_factor=backoff_factor,
+            iteration_callback=iteration_callback
+        )
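A minimal sketch of the new parameter; the callback's exact signature is not documented in this diff, so a permissive hook is used and the command object is assumed to come from a long-running operation (e.g. a dataset clone or export):

```python
def on_iteration(*args, **kwargs):
    # Called on each polling iteration while the command is still running.
    print('command still in progress...')

# `command` is assumed to be an in-progress dl.entities.Command instance.
command = command.wait(timeout=600, step=5, iteration_callback=on_iteration)
```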
dtlpy/entities/compute.py CHANGED
@@ -1,3 +1,4 @@
+import traceback
 from enum import Enum
 from typing import List, Optional, Dict
 from ..services.api_client import ApiClient
@@ -10,6 +11,7 @@ class ClusterProvider(str, Enum):
     AZURE = 'azure'
     HPC = 'hpc'
     LOCAL = 'local'
+    RANCHER_K3S = 'rancher-k3s'


 class ComputeType(str, Enum):
@@ -21,6 +23,31 @@ class ComputeStatus(str, Enum):
     INITIALIZING = "initializing"
     PAUSE = "pause"
     FAILED = "failed"
+    VALIDATING = "validating"
+
+
+class ComputeConsumptionMethod(str, Enum):
+    MQ = "MQ",
+    API = "API"
+
+
+class ComputeSettings:
+    def __init__(self, default_namespace: str, consumption_method: ComputeConsumptionMethod):
+        self.consumption_method = consumption_method
+        self.default_namespace = default_namespace
+
+    @classmethod
+    def from_json(cls, _json):
+        return cls(
+            default_namespace=_json.get('defaultNamespace'),
+            consumption_method=_json.get('consumptionMethod')
+        )
+
+    def to_json(self):
+        return {
+            'defaultNamespace': self.default_namespace,
+            'consumptionMethod': self.consumption_method
+        }


 class Toleration:
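A short sketch of the new settings entity, built directly from the class added above (the values are illustrative, not defaults):

```python
from dtlpy.entities.compute import ComputeSettings, ComputeConsumptionMethod

settings = ComputeSettings(default_namespace='dataloop',
                           consumption_method=ComputeConsumptionMethod.API)
payload = settings.to_json()                   # {'defaultNamespace': ..., 'consumptionMethod': ...}
restored = ComputeSettings.from_json(payload)  # round-trips from a platform-style dict
```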
@@ -80,8 +107,8 @@ class DeploymentResources:
     @classmethod
     def from_json(cls, _json):
         return cls(
-            request=DeploymentResource.from_json(_json.get('request', dict())),
-            limit=DeploymentResource.from_json(_json.get('limit', dict()))
+            request=DeploymentResource.from_json(_json.get('request') or dict()),
+            limit=DeploymentResource.from_json(_json.get('limit') or dict())
         )

     def to_json(self):
@@ -100,7 +127,7 @@ class NodePool:
                  tolerations: Optional[List[Toleration]] = None,
                  description: str = "",
                  node_selector: str = "",
-                 preemtible: bool = False,
+                 preemptible: bool = False,
                  deployment_resources: DeploymentResources = None
                  ):
         self.name = name
@@ -109,7 +136,7 @@ class NodePool:
         self.tolerations = tolerations if tolerations is not None else []
         self.description = description
         self.node_selector = node_selector
-        self.preemtible = preemtible
+        self.preemptible = preemptible
         self.deployment_resources = deployment_resources

     @classmethod
@@ -120,7 +147,7 @@ class NodePool:
            dl_types=_json.get('dlTypes'),
            description=_json.get('description'),
            node_selector=_json.get('nodeSelector'),
-            preemtible=_json.get('preemtible'),
+            preemptible=_json.get('preemptible'),
            deployment_resources=DeploymentResources.from_json(_json.get('deploymentResources', dict())),
            tolerations=[Toleration.from_json(t) for t in _json.get('tolerations', list())]
         )
@@ -133,7 +160,7 @@ class NodePool:
             'isDlTypeDefault': self.is_dl_type_default,
             'description': self.description,
             'nodeSelector': self.node_selector,
-            'preemtible': self.preemtible,
+            'preemptible': self.preemptible,
             'deploymentResources': self.deployment_resources.to_json(),
             'tolerations': [t.to_json() for t in self.tolerations]
         }
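Note that the misspelled `preemtible` keyword was renamed across the constructor, `from_json` and `to_json`, so callers that passed it must switch to `preemptible`. A brief sketch under that assumption (other constructor arguments shown are illustrative and rely on their defaults):

```python
from dtlpy.entities.compute import NodePool

pool = NodePool(name='gpu-pool', dl_types=['gpu'], preemptible=True)
assert pool.preemptible is True  # the old `preemtible=` keyword is no longer accepted
```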
@@ -271,6 +298,8 @@ class Compute:
                  type: ComputeType = ComputeType.KUBERNETES,
                  features: Optional[Dict] = None,
                  metadata: Optional[Dict] = None,
+                 settings: Optional[ComputeSettings] = None,
+                 url: Optional[str] = None
                  ):
         self.id = id
         self.name = name
@@ -284,6 +313,8 @@ class Compute:
         self._client_api = client_api
         self._computes = None
         self._serviceDrivers = None
+        self.settings = settings
+        self.url = url

     @property
     def computes(self):
@@ -291,18 +322,30 @@ class Compute:
             self._computes = repositories.Computes(client_api=self._client_api)
         return self._computes

-    @property
-    def service_drivers(self):
-        if self._serviceDrivers is None:
-            self._serviceDrivers = repositories.ServiceDrivers(client_api=self._client_api)
-        return self._serviceDrivers
-
     def delete(self):
         return self.computes.delete(compute_id=self.id)

     def update(self):
         return self.computes.update(compute=self)

+    @staticmethod
+    def _protected_from_json(_json: dict, client_api: ApiClient):
+        """
+        Same as from_json but with try-except to catch if error
+
+        :param _json: platform json
+        :param client_api: ApiClient entity
+        :return:
+        """
+        try:
+            compute = Compute.from_json(_json=_json,
+                                        client_api=client_api)
+            status = True
+        except Exception:
+            compute = traceback.format_exc()
+            status = False
+        return status, compute
+
     @classmethod
     def from_json(cls, _json, client_api: ApiClient):
         return cls(
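The new internal helper wraps `from_json` so callers can tell parse failures apart from valid entities without their own try/except. A hedged sketch (the payload is a placeholder, not a real platform response):

```python
import dtlpy as dl
from dtlpy.entities.compute import Compute

platform_json = {}  # placeholder for a compute payload returned by the platform

ok, result = Compute._protected_from_json(_json=platform_json, client_api=dl.client_api)
if ok:
    print(result.id)  # parsed Compute entity
else:
    print(result)     # formatted traceback explaining why parsing failed
```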
@@ -315,19 +358,24 @@ class Compute:
             type=ComputeType(_json.get('type')),
             features=_json.get('features'),
             client_api=client_api,
-            metadata=_json.get('metadata')
+            metadata=_json.get('metadata'),
+            settings=ComputeSettings.from_json(_json.get('settings', dict())) if _json.get('settings') else None,
+            url=_json.get('url'),
         )

     def to_json(self):
         return {
             'id': self.id,
+            'name': self.name,
             'context': self.context.to_json(),
             'sharedContexts': [sc.to_json() for sc in self.shared_contexts],
             'global': self.global_,
             'status': self.status.value,
             'type': self.type.value,
             'features': self.features,
-            'metadata': self.metadata
+            'metadata': self.metadata,
+            'settings': self.settings.to_json() if isinstance(self.settings, ComputeSettings) else self.settings,
+            'url': self.url
         }

@@ -335,6 +383,7 @@ class KubernetesCompute(Compute):
     def __init__(
             self,
             id: str,
+            name: str,
            context: ComputeContext,
            cluster: ComputeCluster,
            shared_contexts: Optional[List[ComputeContext]] = None,
@@ -343,16 +392,20 @@ class KubernetesCompute(Compute):
            type: ComputeType = ComputeType.KUBERNETES,
            features: Optional[Dict] = None,
            metadata: Optional[Dict] = None,
-            client_api: ApiClient = None
+            client_api: ApiClient = None,
+            settings: Optional[ComputeSettings] = None,
+            url: Optional[str] = None
     ):
         super().__init__(id=id, context=context, shared_contexts=shared_contexts, global_=global_, status=status,
-                         type=type, features=features, metadata=metadata, client_api=client_api)
+                         type=type, features=features, metadata=metadata, client_api=client_api, settings=settings,
+                         name=name, url=url)
         self.cluster = cluster

     @classmethod
     def from_json(cls, _json, client_api: ApiClient):
         return cls(
             id=_json.get('id'),
+            name=_json.get('name'),
             context=ComputeContext.from_json(_json.get('context', dict())),
             cluster=ComputeCluster.from_json(_json.get('cluster', dict())),
             shared_contexts=[ComputeContext.from_json(sc) for sc in _json.get('sharedContexts', list())],
@@ -361,93 +414,23 @@ class KubernetesCompute(Compute):
             type=ComputeType(_json.get('type')),
             features=_json.get('features'),
             metadata=_json.get('metadata'),
-            client_api=client_api
+            client_api=client_api,
+            settings=ComputeSettings.from_json(_json.get('settings', dict())) if _json.get('settings') else None,
+            url=_json.get('url'),
         )

     def to_json(self):
         return {
             'id': self.id,
+            'name': self.name,
             'context': self.context.to_json(),
             'cluster': self.cluster.to_json(),
             'sharedContexts': [sc.to_json() for sc in self.shared_contexts],
             'global': self.global_,
             'status': self.status.value,
             'type': self.type.value,
-            'features': self.features
-        }
-
-
-class ServiceDriver:
-    def __init__(
-            self,
-            name: str,
-            context: ComputeContext,
-            compute_id: str,
-            client_api: ApiClient,
-            type: ComputeType = None,
-            created_at: str = None,
-            updated_at: str = None,
-            namespace: str = None,
-            metadata: Dict = None,
-            url: str = None,
-            archived: bool = None,
-            id: str = None,
-            is_cache_available: bool = None
-    ):
-        self.name = name
-        self.context = context
-        self.compute_id = compute_id
-        self.client_api = client_api
-        self.type = type or ComputeType.KUBERNETES
-        self.created_at = created_at
-        self.updated_at = updated_at
-        self.namespace = namespace
-        self.metadata = metadata
-        self.url = url
-        self.archived = archived
-        self.id = id
-        self.is_cache_available = is_cache_available
-
-    @classmethod
-    def from_json(cls, _json, client_api: ApiClient):
-        return cls(
-            name=_json.get('name'),
-            context=ComputeContext.from_json(_json.get('context', dict())),
-            compute_id=_json.get('computeId'),
-            client_api=client_api,
-            type=_json.get('type', None),
-            created_at=_json.get('createdAt', None),
-            updated_at=_json.get('updatedAt', None),
-            namespace=_json.get('namespace', None),
-            metadata=_json.get('metadata', None),
-            url=_json.get('url', None),
-            archived=_json.get('archived', None),
-            id=_json.get('id', None),
-            is_cache_available=_json.get('isCacheAvailable', None)
-        )
-
-    def to_json(self):
-        _json = {
-            'name': self.name,
-            'context': self.context.to_json(),
-            'computeId': self.compute_id,
-            'type': self.type,
+            'features': self.features,
+            'metadata': self.metadata,
+            'settings': self.settings.to_json() if isinstance(self.settings, ComputeSettings) else self.settings,
+            'url': self.url
         }
-        if self.created_at is not None:
-            _json['createdAt'] = self.namespace
-        if self.updated_at is not None:
-            _json['updatedAt'] = self.updated_at
-        if self.namespace is not None:
-            _json['namespace'] = self.namespace
-        if self.metadata is not None:
-            _json['metadata'] = self.metadata
-        if self.url is not None:
-            _json['url'] = self.url
-        if self.archived is not None:
-            _json['archived'] = self.archived
-        if self.id is not None:
-            _json['id'] = self.id
-        if self.is_cache_available is not None:
-            _json['isCacheAvailable'] = self.is_cache_available
-
-        return _json