dtlpy 1.108.7__py3-none-any.whl → 1.109.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +1 -7
- dtlpy/__version__.py +1 -1
- dtlpy/entities/__init__.py +3 -3
- dtlpy/entities/annotation.py +26 -57
- dtlpy/entities/annotation_definitions/base_annotation_definition.py +6 -14
- dtlpy/entities/command.py +10 -7
- dtlpy/entities/compute.py +40 -91
- dtlpy/entities/dataset.py +29 -14
- dtlpy/entities/dpk.py +1 -0
- dtlpy/entities/filters.py +3 -1
- dtlpy/entities/item.py +7 -14
- dtlpy/entities/node.py +0 -12
- dtlpy/entities/service.py +0 -9
- dtlpy/entities/service_driver.py +118 -0
- dtlpy/entities/trigger.py +1 -1
- dtlpy/new_instance.py +1 -1
- dtlpy/repositories/__init__.py +2 -1
- dtlpy/repositories/collections.py +86 -34
- dtlpy/repositories/commands.py +14 -4
- dtlpy/repositories/computes.py +160 -123
- dtlpy/repositories/datasets.py +20 -9
- dtlpy/repositories/downloader.py +20 -8
- dtlpy/repositories/dpks.py +26 -1
- dtlpy/repositories/items.py +5 -2
- dtlpy/repositories/service_drivers.py +213 -0
- dtlpy/repositories/services.py +6 -0
- dtlpy-1.109.19.dist-info/METADATA +172 -0
- {dtlpy-1.108.7.dist-info → dtlpy-1.109.19.dist-info}/RECORD +35 -33
- dtlpy-1.108.7.dist-info/METADATA +0 -82
- {dtlpy-1.108.7.data → dtlpy-1.109.19.data}/scripts/dlp +0 -0
- {dtlpy-1.108.7.data → dtlpy-1.109.19.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.108.7.data → dtlpy-1.109.19.data}/scripts/dlp.py +0 -0
- {dtlpy-1.108.7.dist-info → dtlpy-1.109.19.dist-info}/LICENSE +0 -0
- {dtlpy-1.108.7.dist-info → dtlpy-1.109.19.dist-info}/WHEEL +0 -0
- {dtlpy-1.108.7.dist-info → dtlpy-1.109.19.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.108.7.dist-info → dtlpy-1.109.19.dist-info}/top_level.txt +0 -0
dtlpy/__init__.py
CHANGED
@@ -77,7 +77,7 @@ from .entities import (
     # triggers
     TriggerResource, TriggerAction, TriggerExecutionMode, TriggerType,
     # faas
-    FunctionIO, KubernetesAutoscalerType,
+    FunctionIO, KubernetesAutoscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
     InstanceCatalog, PackageInputType, ServiceType, ServiceModeType, KubernetesRPSAutoscaler,
     PackageSlot, SlotPostAction, SlotPostActionType, SlotDisplayScope, SlotDisplayScopeResource, UiBindingPanel,
     # roberto
@@ -232,12 +232,6 @@ def checkout_state():
     state = client_api.state_io.read_json()
     return state
 
-
-def use_attributes_2(state: bool = True):
-    warnings.warn("Function 'use_attributes_2()' is deprecated as of version 1.99.12 and has been non-functional since version 1.90.39. To work with attributes 2.0, simply use 'update_attributes()'.", DeprecationWarning)
-    client_api.attributes_mode.use_attributes_2 = state
-
-
 class LoggingLevel:
     DEBUG = "debug"
     WARNING = "warning"
dtlpy/__version__.py
CHANGED
@@ -1 +1 @@
-version = '1.108.7'
+version = '1.109.19'
dtlpy/entities/__init__.py
CHANGED
@@ -43,7 +43,7 @@ from .package_slot import PackageSlot, SlotPostAction, SlotPostActionType, SlotD
     UiBindingPanel
 from .package_function import PackageFunction, FunctionIO, PackageInputType
 from .time_series import TimeSeries
-from .service import Service, KubernetesAutoscalerType,
+from .service import Service, KubernetesAutoscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRPSAutoscaler, \
     InstanceCatalog, KubernetesRuntime, ServiceType, ServiceModeType
 from .execution import Execution, ExecutionStatus
 from .command import Command, CommandsStatus
@@ -76,8 +76,8 @@ from .resource_execution import ResourceExecution
 from .message import Message, NotificationEventContext
 from .prompt_item import Prompt, PromptItem, PromptType
 from .compute import ClusterProvider, ComputeType, ComputeStatus, Toleration, DeploymentResource, DeploymentResources, ComputeSettings, ComputeConsumptionMethod, \
-    NodePool, AuthenticationIntegration, Authentication, ComputeCluster, ComputeContext, Compute, KubernetesCompute
-
+    NodePool, AuthenticationIntegration, Authentication, ComputeCluster, ComputeContext, Compute, KubernetesCompute
+from .service_driver import ServiceDriver
 from .gis_item import ItemGis, Layer
 from .collection import Collection
dtlpy/entities/annotation.py
CHANGED
@@ -132,9 +132,7 @@ class Annotation(entities.BaseEntity):
     _annotations = attr.ib(repr=False, default=None)
     __client_api = attr.ib(default=None, repr=False)
     _items = attr.ib(repr=False, default=None)
-
-    # temp
-    _recipe_2_attributes = attr.ib(repr=False, default=None)
+    _recipe_1_attributes = attr.ib(repr=False, default=None)
 
     ############
     # Platform #
@@ -424,28 +422,11 @@
 
     @property
     def attributes(self):
-        if self._recipe_2_attributes is not None or self.annotation_definition.attributes == []:
-            return self._recipe_2_attributes
         return self.annotation_definition.attributes
 
     @attributes.setter
    def attributes(self, attributes):
-
-            self._recipe_2_attributes = attributes
-        elif isinstance(attributes, list):
-            warnings.warn("List attributes are deprecated and will be removed in version 1.109. Use Attribute 2.0 (Dictionary) instead. "
-                          "For more details, refer to the documentation: "
-                          "https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_annotations/chapter/#set-attributes-on-annotations",
-                          DeprecationWarning,
-                          )
-            self.annotation_definition.attributes = attributes
-        elif attributes is None:
-            if self._recipe_2_attributes:
-                self._recipe_2_attributes = {}
-            if self.annotation_definition.attributes:
-                self.annotation_definition.attributes = []
-        else:
-            raise ValueError('Attributes must be a dictionary or a list')
+        self.annotation_definition.attributes = attributes
 
     @property
     def color(self):
@@ -1369,9 +1350,9 @@
             status = _json['metadata']['system'].get('status', status)
 
         named_attributes = metadata.get('system', dict()).get('attributes', None)
-
+        recipe_1_attributes = _json.get('attributes', None)
 
-        first_frame_attributes =
+        first_frame_attributes = recipe_1_attributes
         first_frame_coordinates = list()
         first_frame_number = 0
         first_frame_start_time = 0
@@ -1427,7 +1408,7 @@
         def_dict = {'type': _json['type'],
                     'coordinates': coordinates,
                     'label': _json['label'],
-                    'attributes':
+                    'attributes': named_attributes}
         annotation_definition = FrameAnnotation.json_to_annotation_definition(def_dict)
 
         frames = entities.ReflectDict(
@@ -1472,9 +1453,9 @@
             start_frame=start_frame,
             annotations=annotations,
             start_time=start_time,
-            recipe_2_attributes=named_attributes,
             label_suggestions=_json.get('labelSuggestions', None),
-            source=_json.get('source', None)
+            source=_json.get('source', None),
+            recipe_1_attributes=recipe_1_attributes,
         )
         annotation.annotation_definition = annotation_definition
         annotation.__client_api = client_api
@@ -1600,15 +1581,9 @@
         if isinstance(self.annotation_definition, entities.Description):
             _json['metadata']['system']['system'] = True
 
-        if self.
-
-
-            _json['attributes'] = self._platform_dict['attributes']
-        else:
-            _json['attributes'] = self.attributes
-        orig_metadata_system = self._platform_dict.get('metadata', {}).get('system', {})
-        if 'attributes' in orig_metadata_system:
-            _json['metadata']['system']['attributes'] = orig_metadata_system['attributes']
+        _json['metadata']['system']['attributes'] = self.attributes if self.attributes is not None else dict()
+        _json['attributes'] = self._recipe_1_attributes
+
 
         # add frame info
         if self.is_video or (self.end_time and self.end_time > 0) or (self.end_frame and self.end_frame > 0):
@@ -1645,7 +1620,7 @@
         :return: page of scores
         """
         return self.annotations.task_scores(annotation_id=self.id ,task_id=task_id, page_offset=page_offset, page_size=page_size)
-
+
 
 
 @attr.s
@@ -1665,8 +1640,8 @@ class FrameAnnotation(entities.BaseEntity):
     object_visible = attr.ib()
 
     # temp
-    _recipe_2_attributes = attr.ib(repr=False, default=None)
     _interpolation = attr.ib(repr=False, default=False)
+    _recipe_1_attributes = attr.ib(repr=False, default=None)
 
     ################################
     # parent annotation attributes #
@@ -1698,23 +1673,11 @@
 
     @property
     def attributes(self):
-        if self._recipe_2_attributes or self.annotation_definition.attributes == []:
-            return self._recipe_2_attributes
         return self.annotation_definition.attributes
 
     @attributes.setter
     def attributes(self, attributes):
-
-            self._recipe_2_attributes = attributes
-        elif isinstance(attributes, list):
-            warnings.warn("List attributes are deprecated and will be removed in version 1.109. Use Attribute 2.0 (Dictionary) instead. "
-                          "For more details, refer to the documentation: "
-                          "https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_annotations/chapter/#set-attributes-on-annotations",
-                          DeprecationWarning,
-                          )
-            self.annotation_definition.attributes = attributes
-        else:
-            raise ValueError('Attributes must be a dictionary or a list')
+        self.annotation_definition.attributes = attributes
 
     @property
     def geo(self):
@@ -1795,6 +1758,11 @@
 
     @staticmethod
     def json_to_annotation_definition(_json):
+        if 'namedAttributes' in _json:
+            _json['attributes'] = _json['namedAttributes']
+        else:
+            if not isinstance(_json.get('attributes'), dict):
+                _json['attributes'] = None
         if _json['type'] == 'segment':
             annotation = entities.Polygon.from_json(_json)
         elif _json['type'] == 'polyline':
@@ -1868,6 +1836,7 @@
         """
         # get annotation class
         _json['type'] = annotation.type
+        attrs = _json.get('attributes', None)
        annotation_definition = cls.json_to_annotation_definition(_json=_json)
 
         frame_num = _json.get('frame', annotation.last_frame + 1)
@@ -1881,9 +1850,7 @@
             frame_num=frame_num,
             fixed=_json.get('fixed', False),
             object_visible=_json.get('objectVisible', True),
-
-            # temp
-            recipe_2_attributes=_json.get('namedAttributes', None)
+            recipe_1_attributes=attrs,
         )
 
     def to_snapshot(self):
@@ -1900,9 +1867,11 @@
         if self.annotation_definition.description is not None:
             snapshot_dict['description'] = self.annotation_definition.description
 
-        if self.
-            snapshot_dict['namedAttributes'] = self.
-
-
+        if self.attributes is not None:
+            snapshot_dict['namedAttributes'] = self.attributes
+
+        if self._recipe_1_attributes is not None:
+            snapshot_dict['attributes'] = self._recipe_1_attributes
+
 
         return snapshot_dict
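Taken together, these hunks make Annotation.attributes a thin pass-through to the annotation definition's dictionary, while legacy list attributes survive only as the private _recipe_1_attributes carried through serialization (_json['attributes']). A minimal sketch of working with dictionary attributes under this model; the ids, label, and attribute keys are placeholders:

import dtlpy as dl

item = dl.items.get(item_id='my-item-id')  # placeholder id

# Upload a new annotation with Attribute 2.0 (dictionary) attributes
builder = item.annotations.builder()
builder.add(annotation_definition=dl.Box(top=10, left=10, bottom=100, right=100,
                                         label='car',
                                         attributes={'occluded': True, 'color': 'red'}))
item.annotations.upload(builder)

# Update attributes on an existing annotation: a dict is assigned straight
# onto the annotation definition
annotation = item.annotations.get(annotation_id='my-annotation-id')  # placeholder id
annotation.attributes = {'occluded': False}
annotation.update(system_metadata=True)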
dtlpy/entities/annotation_definitions/base_annotation_definition.py
CHANGED
@@ -13,14 +13,9 @@ class BaseAnnotationDefinition:
         self._bottom = 0
         self._right = 0
         self._annotation = None
-
-
-
-                          "For more details, refer to the documentation: "
-                          "https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_annotations/chapter/#set-attributes-on-annotations",
-                          DeprecationWarning,
-                          )
-        self._attributes = attributes
+        if attributes and not isinstance(attributes, dict):
+            raise TypeError('attributes should be a dictionary')
+        self._attributes = attributes or {}
 
     @property
     def attributes(self):
@@ -28,13 +23,10 @@
 
     @attributes.setter
     def attributes(self, v):
-        if isinstance(v,
-
-                          "For more details, refer to the documentation: "
-                          "https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_annotations/chapter/#set-attributes-on-annotations",
-                          DeprecationWarning,
-                          )
+        if v and not isinstance(v, dict):
+            raise TypeError('attributes should be a dictionary')
         self._attributes = v
+
     @property
     def top(self):
         return self._top
dtlpy/entities/command.py
CHANGED
@@ -146,21 +146,24 @@ class Command(entities.BaseEntity):
             entities.CommandsStatus.ABORTED
         ]
 
-    def wait(self, timeout=0, step=None, backoff_factor=1):
+    def wait(self, timeout=0, step=None, backoff_factor=1, iteration_callback=None):
         """
         Wait for Command to finish
 
         :param int timeout: int, seconds to wait until TimeoutError is raised. if 0 - wait until done
         :param int step: int, seconds between polling
         :param float backoff_factor: A backoff factor to apply between attempts after the second try
+        :param function iteration_callback: function to call on each iteration
         :return: Command object
         """
         if not self.in_progress():
             return self
 
-        return self.commands.wait(
-
-
-
-
-
+        return self.commands.wait(
+            command_id=self.id,
+            timeout=timeout,
+            step=step,
+            url=self.url,
+            backoff_factor=backoff_factor,
+            iteration_callback=iteration_callback
+        )
dtlpy/entities/compute.py
CHANGED
@@ -1,3 +1,4 @@
+import traceback
 from enum import Enum
 from typing import List, Optional, Dict
 from ..services.api_client import ApiClient
@@ -22,6 +23,7 @@ class ComputeStatus(str, Enum):
     INITIALIZING = "initializing"
     PAUSE = "pause"
     FAILED = "failed"
+    VALIDATING = "validating"
 
 
 class ComputeConsumptionMethod(str, Enum):
@@ -105,8 +107,8 @@ class DeploymentResources:
     @classmethod
     def from_json(cls, _json):
         return cls(
-            request=DeploymentResource.from_json(_json.get('request'
-            limit=DeploymentResource.from_json(_json.get('limit'
+            request=DeploymentResource.from_json(_json.get('request') or dict()),
+            limit=DeploymentResource.from_json(_json.get('limit') or dict())
         )
 
     def to_json(self):
@@ -296,7 +298,8 @@ class Compute:
             type: ComputeType = ComputeType.KUBERNETES,
             features: Optional[Dict] = None,
             metadata: Optional[Dict] = None,
-            settings: Optional[ComputeSettings] = None
+            settings: Optional[ComputeSettings] = None,
+            url: Optional[str] = None
     ):
         self.id = id
         self.name = name
@@ -311,6 +314,7 @@
         self._computes = None
         self._serviceDrivers = None
         self.settings = settings
+        self.url = url
 
     @property
     def computes(self):
@@ -318,18 +322,30 @@
             self._computes = repositories.Computes(client_api=self._client_api)
         return self._computes
 
-    @property
-    def service_drivers(self):
-        if self._serviceDrivers is None:
-            self._serviceDrivers = repositories.ServiceDrivers(client_api=self._client_api)
-        return self._serviceDrivers
-
     def delete(self):
         return self.computes.delete(compute_id=self.id)
 
     def update(self):
         return self.computes.update(compute=self)
 
+    @staticmethod
+    def _protected_from_json(_json: dict, client_api: ApiClient):
+        """
+        Same as from_json but with try-except to catch if error
+
+        :param _json: platform json
+        :param client_api: ApiClient entity
+        :return:
+        """
+        try:
+            compute = Compute.from_json(_json=_json,
+                                        client_api=client_api)
+            status = True
+        except Exception:
+            compute = traceback.format_exc()
+            status = False
+        return status, compute
+
     @classmethod
     def from_json(cls, _json, client_api: ApiClient):
         return cls(
@@ -343,12 +359,14 @@
             features=_json.get('features'),
             client_api=client_api,
             metadata=_json.get('metadata'),
-            settings=ComputeSettings.from_json(_json.get('settings', dict())) if _json.get('settings') else None
+            settings=ComputeSettings.from_json(_json.get('settings', dict())) if _json.get('settings') else None,
+            url=_json.get('url'),
         )
 
     def to_json(self):
         return {
             'id': self.id,
+            'name': self.name,
             'context': self.context.to_json(),
             'sharedContexts': [sc.to_json() for sc in self.shared_contexts],
             'global': self.global_,
@@ -356,7 +374,8 @@
             'type': self.type.value,
             'features': self.features,
             'metadata': self.metadata,
-            'settings': self.settings.to_json() if isinstance(self.settings, ComputeSettings) else self.settings
+            'settings': self.settings.to_json() if isinstance(self.settings, ComputeSettings) else self.settings,
+            'url': self.url
         }
 
 
@@ -374,16 +393,19 @@ class KubernetesCompute(Compute):
             features: Optional[Dict] = None,
             metadata: Optional[Dict] = None,
             client_api: ApiClient = None,
-            settings: Optional[ComputeSettings] = None
+            settings: Optional[ComputeSettings] = None,
+            url: Optional[str] = None
     ):
         super().__init__(id=id, context=context, shared_contexts=shared_contexts, global_=global_, status=status,
-                         type=type, features=features, metadata=metadata, client_api=client_api, settings=settings,
+                         type=type, features=features, metadata=metadata, client_api=client_api, settings=settings,
+                         name=name, url=url)
         self.cluster = cluster
 
     @classmethod
     def from_json(cls, _json, client_api: ApiClient):
         return cls(
             id=_json.get('id'),
+            name=_json.get('name'),
             context=ComputeContext.from_json(_json.get('context', dict())),
             cluster=ComputeCluster.from_json(_json.get('cluster', dict())),
             shared_contexts=[ComputeContext.from_json(sc) for sc in _json.get('sharedContexts', list())],
@@ -393,12 +415,14 @@
             features=_json.get('features'),
             metadata=_json.get('metadata'),
             client_api=client_api,
-            settings=ComputeSettings.from_json(_json.get('settings', dict())) if _json.get('settings') else None
+            settings=ComputeSettings.from_json(_json.get('settings', dict())) if _json.get('settings') else None,
+            url=_json.get('url'),
         )
 
     def to_json(self):
         return {
             'id': self.id,
+            'name': self.name,
             'context': self.context.to_json(),
             'cluster': self.cluster.to_json(),
             'sharedContexts': [sc.to_json() for sc in self.shared_contexts],
@@ -407,81 +431,6 @@
             'type': self.type.value,
             'features': self.features,
             'metadata': self.metadata,
-            'settings': self.settings.to_json() if isinstance(self.settings, ComputeSettings) else self.settings
+            'settings': self.settings.to_json() if isinstance(self.settings, ComputeSettings) else self.settings,
+            'url': self.url
         }
-
-
-class ServiceDriver:
-    def __init__(
-            self,
-            name: str,
-            context: ComputeContext,
-            compute_id: str,
-            client_api: ApiClient,
-            type: ComputeType = None,
-            created_at: str = None,
-            updated_at: str = None,
-            namespace: str = None,
-            metadata: Dict = None,
-            url: str = None,
-            archived: bool = None,
-            id: str = None,
-            is_cache_available: bool = None
-    ):
-        self.name = name
-        self.context = context
-        self.compute_id = compute_id
-        self.client_api = client_api
-        self.type = type or ComputeType.KUBERNETES
-        self.created_at = created_at
-        self.updated_at = updated_at
-        self.namespace = namespace
-        self.metadata = metadata
-        self.url = url
-        self.archived = archived
-        self.id = id
-        self.is_cache_available = is_cache_available
-
-    @classmethod
-    def from_json(cls, _json, client_api: ApiClient):
-        return cls(
-            name=_json.get('name'),
-            context=ComputeContext.from_json(_json.get('context', dict())),
-            compute_id=_json.get('computeId'),
-            client_api=client_api,
-            type=_json.get('type', None),
-            created_at=_json.get('createdAt', None),
-            updated_at=_json.get('updatedAt', None),
-            namespace=_json.get('namespace', None),
-            metadata=_json.get('metadata', None),
-            url=_json.get('url', None),
-            archived=_json.get('archived', None),
-            id=_json.get('id', None),
-            is_cache_available=_json.get('isCacheAvailable', None)
-        )
-
-    def to_json(self):
-        _json = {
-            'name': self.name,
-            'context': self.context.to_json(),
-            'computeId': self.compute_id,
-            'type': self.type,
-        }
-        if self.created_at is not None:
-            _json['createdAt'] = self.namespace
-        if self.updated_at is not None:
-            _json['updatedAt'] = self.updated_at
-        if self.namespace is not None:
-            _json['namespace'] = self.namespace
-        if self.metadata is not None:
-            _json['metadata'] = self.metadata
-        if self.url is not None:
-            _json['url'] = self.url
-        if self.archived is not None:
-            _json['archived'] = self.archived
-        if self.id is not None:
-            _json['id'] = self.id
-        if self.is_cache_available is not None:
-            _json['isCacheAvailable'] = self.is_cache_available
-
-        return _json
dtlpy/entities/dataset.py
CHANGED
@@ -626,8 +626,9 @@ class Dataset(entities.BaseEntity):
                             alpha=1,
                             export_version=ExportVersion.V1,
                             dataset_lock=False,
-                            lock_timeout_sec=None
-
+                            lock_timeout_sec=None,
+                            export_summary=False,
+                            ):
         """
         Download dataset by filters.
         Filtering the dataset for items and save them local
@@ -641,6 +642,7 @@
         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
         :param bool overwrite: optional - default = False
         :param bool dataset_lock: optional - default = False
+        :param bool export_summary: optional - default = False
         :param int lock_timeout_sec: optional
         :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
         :param bool with_text: optional - add text to annotations, default = False
@@ -664,9 +666,10 @@
                                 thickness=1,
                                 with_text=False,
                                 alpha=1,
-                                dataset_lock=False
-                                lock_timeout_sec=300
-
+                                dataset_lock=False,
+                                lock_timeout_sec=300,
+                                export_summary=False
+                                )
         """
 
         return self.datasets.download_annotations(
@@ -685,7 +688,8 @@
             alpha=alpha,
             export_version=export_version,
             dataset_lock=dataset_lock,
-            lock_timeout_sec=lock_timeout_sec
+            lock_timeout_sec=lock_timeout_sec,
+            export_summary=export_summary
         )
 
     def export(self,
@@ -698,7 +702,8 @@
                export_type: ExportType = ExportType.JSON,
                timeout: int = 0,
                dataset_lock: bool = False,
-               lock_timeout_sec: int = None
+               lock_timeout_sec: int = None,
+               export_summary: bool = False):
         """
         Export dataset items and annotations.
 
@@ -713,6 +718,7 @@
         :param bool include_feature_vectors: Include item feature vectors in the export
         :param bool include_annotations: Include item annotations in the export
         :param bool dataset_lock: Make dataset readonly during the export
+        :param bool export_summary: Download dataset export summary
         :param int lock_timeout_sec: Timeout for locking the dataset during export in seconds
         :param entities.ExportType export_type: Type of export ('json' or 'zip')
         :param int timeout: Maximum time in seconds to wait for the export to complete
@@ -739,7 +745,8 @@
             export_type=export_type,
             timeout=timeout,
             dataset_lock=dataset_lock,
-            lock_timeout_sec=lock_timeout_sec
+            lock_timeout_sec=lock_timeout_sec,
+            export_summary=export_summary)
 
     def upload_annotations(self,
                            local_path,
@@ -975,7 +982,8 @@
                  alpha=1,
                  export_version=ExportVersion.V1,
                  dataset_lock=False,
-                 lock_timeout_sec=None
+                 lock_timeout_sec=None,
+                 export_summary=False,
                  ):
         """
         Download dataset by filters.
@@ -991,6 +999,7 @@
         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
         :param bool overwrite: optional - default = False to overwrite the existing files
         :param bool dataset_lock: optional - default = False to make dataset readonly during the download
+        :param bool export_summary: optional - default = False to get the symmary of the export
         :param int lock_timeout_sec: optional - Set lock timeout for the export
         :param bool to_items_folder: Create 'items' folder and download items to it
         :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
@@ -1011,7 +1020,8 @@
                              with_text=False,
                              alpha=1,
                              dataset_lock=False,
-                             lock_timeout_sec=300
+                             lock_timeout_sec=300,
+                             export_summary=False
                              )
         """
         return self.items.download(filters=filters,
@@ -1027,7 +1037,8 @@
                                    alpha=alpha,
                                    export_version=export_version,
                                    dataset_lock=dataset_lock,
-                                   lock_timeout_sec=lock_timeout_sec
+                                   lock_timeout_sec=lock_timeout_sec,
+                                   export_summary=export_summary
                                    )
 
     def download_folder(
@@ -1046,7 +1057,8 @@
             alpha=1,
             export_version=ExportVersion.V1,
             dataset_lock=False,
-            lock_timeout_sec=None
+            lock_timeout_sec=None,
+            export_summary=False,
     ):
         """
         Download dataset folder.
@@ -1062,6 +1074,7 @@
         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
         :param bool overwrite: optional - default = False to overwrite the existing files
         :param bool dataset_lock: optional - default = False to make the dataset readonly during the download
+        :param bool export_summary: optional - default = False to get the symmary of the export
         :param bool lock_timeout_sec: optional - Set lock timeout for the export
         :param bool to_items_folder: Create 'items' folder and download items to it
         :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
@@ -1084,7 +1097,8 @@
                                     alpha=1,
                                     save_locally=True,
                                     dataset_lock=False
-                                    lock_timeout_sec=300
+                                    lock_timeout_sec=300,
+                                    export_summary=False
                                     )
         """
         filters = self.datasets._bulid_folder_filter(folder_path=folder_path, filters=filters)
@@ -1101,7 +1115,8 @@
             alpha=alpha,
             export_version=export_version,
             dataset_lock=dataset_lock,
-            lock_timeout_sec=lock_timeout_sec
+            lock_timeout_sec=lock_timeout_sec,
+            export_summary=export_summary
        )
 
     def delete_labels(self, label_names):
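All of the download/export entry points (download_annotations, export, download, download_folder) gain the same pass-through export_summary flag next to the existing dataset_lock/lock_timeout_sec options. A minimal sketch of requesting the summary during an annotation download; the id and path are placeholders:

import dtlpy as dl

dataset = dl.datasets.get(dataset_id='my-dataset-id')  # placeholder id
dataset.download_annotations(
    local_path='/tmp/annotations',
    annotation_options=dl.ViewAnnotationOptions.JSON,
    dataset_lock=True,
    lock_timeout_sec=300,
    export_summary=True,  # new in 1.109.x: also fetch the export summary
)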
dtlpy/entities/dpk.py
CHANGED
@@ -43,6 +43,7 @@ class Slot(entities.DlEntity):
 
 
 class Toolbar(entities.DlEntity):
+    name: str = entities.DlProperty(location=['name'], _type=str)
     display_name: str = entities.DlProperty(location=['displayName'], _type=str)
     conditions: dict = entities.DlProperty(location=['conditions'], _type=dict)
     invoke: dict = entities.DlProperty(location=['invoke'], _type=dict)