dtlpy 1.108.7__py3-none-any.whl → 1.109.19__py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
- dtlpy/__init__.py +1 -7
- dtlpy/__version__.py +1 -1
- dtlpy/entities/__init__.py +3 -3
- dtlpy/entities/annotation.py +26 -57
- dtlpy/entities/annotation_definitions/base_annotation_definition.py +6 -14
- dtlpy/entities/command.py +10 -7
- dtlpy/entities/compute.py +40 -91
- dtlpy/entities/dataset.py +29 -14
- dtlpy/entities/dpk.py +1 -0
- dtlpy/entities/filters.py +3 -1
- dtlpy/entities/item.py +7 -14
- dtlpy/entities/node.py +0 -12
- dtlpy/entities/service.py +0 -9
- dtlpy/entities/service_driver.py +118 -0
- dtlpy/entities/trigger.py +1 -1
- dtlpy/new_instance.py +1 -1
- dtlpy/repositories/__init__.py +2 -1
- dtlpy/repositories/collections.py +86 -34
- dtlpy/repositories/commands.py +14 -4
- dtlpy/repositories/computes.py +160 -123
- dtlpy/repositories/datasets.py +20 -9
- dtlpy/repositories/downloader.py +20 -8
- dtlpy/repositories/dpks.py +26 -1
- dtlpy/repositories/items.py +5 -2
- dtlpy/repositories/service_drivers.py +213 -0
- dtlpy/repositories/services.py +6 -0
- dtlpy-1.109.19.dist-info/METADATA +172 -0
- {dtlpy-1.108.7.dist-info → dtlpy-1.109.19.dist-info}/RECORD +35 -33
- dtlpy-1.108.7.dist-info/METADATA +0 -82
- {dtlpy-1.108.7.data → dtlpy-1.109.19.data}/scripts/dlp +0 -0
- {dtlpy-1.108.7.data → dtlpy-1.109.19.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.108.7.data → dtlpy-1.109.19.data}/scripts/dlp.py +0 -0
- {dtlpy-1.108.7.dist-info → dtlpy-1.109.19.dist-info}/LICENSE +0 -0
- {dtlpy-1.108.7.dist-info → dtlpy-1.109.19.dist-info}/WHEEL +0 -0
- {dtlpy-1.108.7.dist-info → dtlpy-1.109.19.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.108.7.dist-info → dtlpy-1.109.19.dist-info}/top_level.txt +0 -0
dtlpy/entities/filters.py
CHANGED

```diff
@@ -46,7 +46,9 @@ class FiltersResource(str, Enum):
     DRIVERS = 'drivers'
     SETTINGS = 'setting'
     RESOURCE_EXECUTION = 'resourceExecution'
-    METRICS = 'metrics'
+    METRICS = 'metrics',
+    SERVICE_DRIVER = 'serviceDrivers',
+    COMPUTE = 'compute'


 class FiltersOperations(str, Enum):
```
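The new `SERVICE_DRIVER` and `COMPUTE` members let `Filters` queries target the compute resources introduced in this release. A minimal usage sketch (not part of the diff; assumes an authenticated `dtlpy` session, and the `archived` field is illustrative):

```python
import dtlpy as dl

# Query the new 'serviceDrivers' resource through the generic filters API.
filters = dl.Filters(resource=dl.FiltersResource.SERVICE_DRIVER)
filters.add(field='archived', values=False)
```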
dtlpy/entities/item.py
CHANGED

```diff
@@ -455,7 +455,8 @@ class Item(entities.BaseEntity):
              alpha=1,
              export_version=ExportVersion.V1,
              dataset_lock=False,
-             lock_timeout_sec=None
+             lock_timeout_sec=None,
+             export_summary=False,
              ):
         """
         Download dataset by filters.
@@ -470,6 +471,7 @@ class Item(entities.BaseEntity):
         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
         :param bool overwrite: optional - default = False
         :param bool dataset_lock: optional - default = False
+        :param bool export_summary: optional - default = False
         :param int lock_timeout_sec: optional
         :param bool to_items_folder: Create 'items' folder and download items to it
         :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
@@ -491,7 +493,8 @@ class Item(entities.BaseEntity):
                               alpha=1,
                               save_locally=True,
                               dataset_lock=False
-                              lock_timeout_sec=300
+                              lock_timeout_sec=300,
+                              export_summary=False
                               )
         """
         # if dir - concatenate local path and item name
@@ -527,7 +530,8 @@ class Item(entities.BaseEntity):
                                            export_version=export_version,
                                            filters=filters,
                                            dataset_lock=dataset_lock,
-                                           lock_timeout_sec=lock_timeout_sec)
+                                           lock_timeout_sec=lock_timeout_sec,
+                                           export_summary=export_summary)

     def delete(self):
         """
@@ -821,17 +825,6 @@ class Item(entities.BaseEntity):
                 {"key": key, "name": self.collections.get_name_by_key(key)}
                 for key in collections.keys()
             ]
-
-    def list_missing_collections(self) -> List[str]:
-        """
-        List all items in the dataset that are not assigned to any collection.
-
-        :return: A list of item IDs that are not part of any collection.
-        """
-        filters = entities.Filters()
-        filters.add(field='metadata.system.collections', values=None)
-        filters.add(field='datasetId', values=self._dataset.id)
-        return self._dataset.items.list(filters=filters)

     def task_scores(self, task_id: str, page_offset: int = None, page_size: int = None):
         """
```

Note that `list_missing_collections` is not dropped from the SDK: it moves to the `Collections` repository (see `dtlpy/repositories/collections.py` below).
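A download sketch showing where the new `export_summary` flag sits, mirroring the updated docstring (not from the diff; the item ID and local path are illustrative):

```python
import dtlpy as dl

item = dl.items.get(item_id='my-item-id')  # hypothetical ID
item.download(
    local_path='/home/project/images',
    annotation_options=dl.ViewAnnotationOptions.MASK,
    dataset_lock=False,
    lock_timeout_sec=300,
    export_summary=True,  # new in this release; default is False
)
```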
dtlpy/entities/node.py
CHANGED

```diff
@@ -91,7 +91,6 @@ class PipelineNodeIO:
                  port_id: str = None,
                  color: tuple = None,
                  port_percentage: int = None,
-                 action: str = None,
                  default_value=None,
                  variable_name: str = None,
                  actions: list = None,
@@ -118,19 +117,8 @@ class PipelineNodeIO:
         self.default_value = default_value
         self.variable_name = variable_name
         self.description = description
-
-        if action is not None:
-            warnings.warn('action param has been deprecated in version 1.95', DeprecationWarning)
-            if actions is None:
-                actions = []
-            actions.append(action)
         self.actions = actions

-    @property
-    def action(self):
-        warnings.warn('action attribute has been deprecated in version 1.95', DeprecationWarning)
-        return None
-
     @staticmethod
     def from_json(_json: dict):
         return PipelineNodeIO(
```
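With both the `action` parameter and the `action` property removed (deprecated since 1.95), callers must pass `actions` as a list. A construction sketch (argument names and values are illustrative, not taken from the diff):

```python
from dtlpy import entities

io = entities.PipelineNodeIO(
    input_type='Item',
    name='item',
    display_name='item',
    actions=['approve', 'discard'],  # replaces the removed action=... parameter
)
```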
dtlpy/entities/service.py
CHANGED

```diff
@@ -869,20 +869,11 @@ class KubernetesAutoscalerType(str, Enum):
 class KubernetesAutuscalerTypeMeta(type):
     def __getattribute__(cls, item):
         if hasattr(KubernetesAutoscalerType, item):
-            warnings.warn(
-                'KubernetesAutuscalerType is deprecated and will be removed in version 1.97.0, '
-                'use KubernetesAutoscalerType instead',
-                DeprecationWarning
-            )
             return getattr(KubernetesAutoscalerType, item)
         else:
             raise AttributeError(f"KubernetesAutuscalerType has no attribute '{item}'")


-class KubernetesAutuscalerType(metaclass=KubernetesAutuscalerTypeMeta):
-    pass
-
-
 class KubernetesAutoscaler(entities.BaseEntity):
     MIN_REPLICA_DEFAULT = 0
     MAX_REPLICA_DEFAULT = 1
```
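With the misspelled `KubernetesAutuscalerType` shim class dropped, only the correctly spelled enum remains. A sketch of the surviving API (assuming the `RABBITMQ` member, which predates this release):

```python
import dtlpy as dl

# The misspelled KubernetesAutuscalerType alias no longer exists; use the enum directly.
autoscaler_type = dl.KubernetesAutoscalerType.RABBITMQ
```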
dtlpy/entities/service_driver.py
ADDED

```diff
@@ -0,0 +1,118 @@
+import traceback
+from typing import Dict
+from ..services.api_client import ApiClient
+from .. import repositories
+from .compute import ComputeContext, ComputeType
+
+class ServiceDriver:
+    def __init__(
+            self,
+            name: str,
+            context: ComputeContext,
+            compute_id: str,
+            client_api: ApiClient,
+            type: ComputeType = None,
+            created_at: str = None,
+            updated_at: str = None,
+            namespace: str = None,
+            metadata: Dict = None,
+            url: str = None,
+            archived: bool = None,
+            id: str = None,
+            is_cache_available: bool = None
+    ):
+        self.name = name
+        self.context = context
+        self.compute_id = compute_id
+        self.client_api = client_api
+        self.type = type or ComputeType.KUBERNETES
+        self.created_at = created_at
+        self.updated_at = updated_at
+        self.namespace = namespace
+        self.metadata = metadata
+        self.url = url
+        self.archived = archived
+        self.id = id
+        self.is_cache_available = is_cache_available
+        self._service_drivers = None
+        self._client_api = client_api
+
+    @property
+    def service_drivers(self):
+        if self._service_drivers is None:
+            self._service_drivers = repositories.ServiceDrivers(client_api=self._client_api)
+        return self._service_drivers
+
+    @staticmethod
+    def _protected_from_json(_json: dict, client_api: ApiClient):
+        """
+        Same as from_json but with try-except to catch if error
+
+        :param _json: platform json
+        :param client_api: ApiClient entity
+        :return:
+        """
+        try:
+            service = ServiceDriver.from_json(_json=_json,
+                                              client_api=client_api)
+            status = True
+        except Exception:
+            service = traceback.format_exc()
+            status = False
+        return status, service
+
+    @classmethod
+    def from_json(cls, _json, client_api: ApiClient):
+        return cls(
+            name=_json.get('name'),
+            context=ComputeContext.from_json(_json.get('context', dict())),
+            compute_id=_json.get('computeId'),
+            client_api=client_api,
+            type=_json.get('type', None),
+            created_at=_json.get('createdAt', None),
+            updated_at=_json.get('updatedAt', None),
+            namespace=_json.get('namespace', None),
+            metadata=_json.get('metadata', None),
+            url=_json.get('url', None),
+            archived=_json.get('archived', None),
+            id=_json.get('id', None),
+            is_cache_available=_json.get('isCacheAvailable', None)
+        )
+
+    def to_json(self):
+        _json = {
+            'name': self.name,
+            'context': self.context.to_json(),
+            'computeId': self.compute_id,
+            'type': self.type,
+        }
+        if self.created_at is not None:
+            _json['createdAt'] = self.created_at
+        if self.updated_at is not None:
+            _json['updatedAt'] = self.updated_at
+        if self.namespace is not None:
+            _json['namespace'] = self.namespace
+        if self.metadata is not None:
+            _json['metadata'] = self.metadata
+        if self.url is not None:
+            _json['url'] = self.url
+        if self.archived is not None:
+            _json['archived'] = self.archived
+        if self.id is not None:
+            _json['id'] = self.id
+        if self.is_cache_available is not None:
+            _json['isCacheAvailable'] = self.is_cache_available
+
+        return _json
+
+    def delete(self):
+        """
+        Delete a service driver
+        """
+        return self.service_drivers.delete(service_driver_id=self.id)
+
+    def update(self):
+        """
+        Update a service driver
+        """
+        return self.service_drivers.update(service_driver=self)
```
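A round-trip sketch of the new entity's JSON mapping (not from the diff; payload values are illustrative, `client_api=None` is a stand-in for a real `ApiClient`, and it assumes `ComputeContext.from_json` tolerates an empty dict):

```python
from dtlpy.entities.service_driver import ServiceDriver

payload = {
    'name': 'gpu-driver',      # illustrative
    'context': {},             # parsed by ComputeContext.from_json
    'computeId': 'compute-123',
    'isCacheAvailable': True,
}
driver = ServiceDriver.from_json(_json=payload, client_api=None)
assert driver.compute_id == 'compute-123'
# Optional fields are emitted only when set:
assert driver.to_json()['isCacheAvailable'] is True
```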
dtlpy/entities/trigger.py
CHANGED
dtlpy/new_instance.py
CHANGED

```diff
@@ -22,7 +22,7 @@ class Dtlpy:
         # triggers
         TriggerResource, TriggerAction, TriggerExecutionMode, TriggerType,
         # faas
-        FunctionIO, KubernetesAutoscalerType,
+        FunctionIO, KubernetesAutoscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRuntime,
         InstanceCatalog, PackageInputType, ServiceType, ServiceModeType,
         PackageSlot, SlotPostAction, SlotPostActionType, SlotDisplayScope, SlotDisplayScopeResource, UiBindingPanel,
         # roberto
```
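The three autoscaler classes are now importable through the multi-environment `Dtlpy` wrapper, matching what the root package already exposed. A usage sketch (constructor arguments follow the documented rabbitmq autoscaler; values are illustrative):

```python
import dtlpy as dl

autoscaler = dl.KubernetesRabbitmqAutoscaler(
    min_replicas=0,   # matches KubernetesAutoscaler.MIN_REPLICA_DEFAULT
    max_replicas=2,
    queue_length=10,
)
```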
dtlpy/repositories/__init__.py
CHANGED

```diff
@@ -51,5 +51,6 @@ from .dpks import Dpks
 from .messages import Messages
 from .compositions import Compositions
 from .schema import Schema
-from .computes import Computes
+from .computes import Computes
+from .service_drivers import ServiceDrivers
 from .collections import Collections
```
dtlpy/repositories/collections.py
CHANGED

```diff
@@ -1,8 +1,5 @@
-from
-from
-from dtlpy.entities.dataset import Dataset
-from dtlpy.entities.filters import FiltersMethod
-from dtlpy.services.api_client import ApiClient
+from .. import entities, exceptions
+from ..services.api_client import ApiClient
 from typing import List

 class Collections:
@@ -15,6 +12,26 @@ class Collections:
         self._dataset = dataset
         self._item = item

+    @property
+    def dataset(self) -> entities.Dataset:
+        if self._dataset is None:
+            raise ValueError("Must set dataset for this action.")
+        return self._dataset
+
+    @dataset.setter
+    def dataset(self, dataset: entities.Dataset):
+        self._dataset = dataset
+
+    @property
+    def item(self) -> entities.Item:
+        if self._item is None:
+            raise ValueError("Must set item for this action.")
+        return self._item
+
+    @item.setter
+    def item(self, item: entities.Item):
+        self._item = item
+
     def create(self, name: str) -> entities.Collection:
         """
         Creates a new collection in the dataset.
@@ -22,12 +39,11 @@ class Collections:
         :param name: The name of the new collection.
         :return: The created collection details.
         """
-        dataset_id = self._dataset.id
         self.validate_max_collections()
         self.validate_collection_name(name)
         payload = {"name": name}
         success, response = self._client_api.gen_request(
-            req_type="post", path=f"/datasets/{dataset_id}/items/collections", json_req=payload
+            req_type="post", path=f"/datasets/{self.dataset.id}/items/collections", json_req=payload
         )
         if success:
             collection_json = self._single_collection(data=response.json(), name=name)
@@ -43,11 +59,10 @@ class Collections:
         :param new_name: The new name for the collection.
         :return: The updated collection details.
         """
-        dataset_id = self._dataset.id
         self.validate_collection_name(new_name)
         payload = {"name": new_name}
         success, response = self._client_api.gen_request(
-            req_type="patch", path=f"/datasets/{dataset_id}/items/collections/{collection_name}", json_req=payload
+            req_type="patch", path=f"/datasets/{self.dataset.id}/items/collections/{collection_name}", json_req=payload
         )
         if success:
             collection_json = self._single_collection(data=response.json(), name=new_name)
@@ -61,9 +76,8 @@ class Collections:

         :param collection_name: The name of the collection to delete.
         """
-        dataset_id = self._dataset.id
         success, response = self._client_api.gen_request(
-            req_type="delete", path=f"/datasets/{dataset_id}/items/collections/{collection_name}"
+            req_type="delete", path=f"/datasets/{self.dataset.id}/items/collections/{collection_name}"
         )
         if success:
             # Wait for the split operation to complete
@@ -74,7 +88,7 @@ class Collections:
         else:
             raise exceptions.PlatformException(response)

-    def clone(self, collection_name: str) ->
+    def clone(self, collection_name: str) -> entities.Collection:
         """
         Clones an existing collection, creating a new one with a unique name.

@@ -99,7 +113,10 @@ class Collections:

         # Create the cloned collection
         cloned_collection = self.create(name=clone_name)
-
+        filters = entities.Filters()
+        filters.add(field=f'metadata.system.collections.{original_collection["key"]}', values=True)
+        self.assign(collections=[cloned_collection.name],
+                    filters=filters)
         return cloned_collection

@@ -109,9 +126,8 @@ class Collections:

         :return: A list of collections in the dataset.
         """
-        dataset_id = self._dataset.id
         success, response = self._client_api.gen_request(
-            req_type="GET", path=f"/datasets/{dataset_id}/items/collections"
+            req_type="GET", path=f"/datasets/{self.dataset.id}/items/collections"
         )
         if success:
             data = response.json()
@@ -140,6 +156,17 @@ class Collections:
         if len(collections) >= 10:
             raise ValueError("The dataset already has the maximum number of collections (10).")

+    def list_missing_collections(self) -> List[str]:
+        """
+        List all items in the dataset that are not assigned to any collection.
+
+        :return: A list of item IDs that are not part of any collection.
+        """
+        filters = entities.Filters()
+        filters.add(field='metadata.system.collections', values=None)
+        filters.add(field='datasetId', values=self._dataset.id)
+        return self._dataset.items.list(filters=filters)
+
     def list_unassigned_items(self) -> list:
         """
         List unassigned items in a dataset (items where all collection fields are false).
@@ -147,7 +174,7 @@ class Collections:
         :return: List of unassigned item IDs
         :rtype: list
         """
-        filters = entities.Filters(method=FiltersMethod.AND)  # Use AND method for all conditions
+        filters = entities.Filters(method=entities.FiltersMethod.AND)  # Use AND method for all conditions
         collection_fields = [
             "collections0",
             "collections1",
@@ -163,7 +190,7 @@ class Collections:

         # Add each field to the filter with a value of False
         for field in collection_fields:
-            filters.add(field=field, values=False, method=FiltersMethod.AND)
+            filters.add(field=field, values=False, method=entities.FiltersMethod.AND)

         missing_ids = []
         pages = self._dataset.items.list(filters=filters)
@@ -176,31 +203,33 @@ class Collections:

     def assign(
             self,
-            dataset_id: str,
             collections: List[str],
+            dataset_id: str = None,
             item_id: str = None,
-
+            filters: entities.Filters = None
     ) -> bool:
         """
         Assign an item to a collection. Creates the collection if it does not exist.

-        :param dataset_id: ID of the dataset.
         :param collections: List of the collections to assign the item to.
+        :param dataset_id: ID of the dataset.
         :param item_id: (Optional) ID of the item to assign. If not provided, all items in the dataset will be updated.
-        :param
+        :param filters: (Optional) Filters of items to assign to the collections.
         :return: True if the assignment was successful, otherwise raises an exception.
         """
+        if not isinstance(collections, list):
+            raise ValueError("collections must be a list.")
+        if dataset_id is None and self._dataset is not None:
+            dataset_id = self.dataset.id
+        if item_id is None and self._item is not None:
+            item_id = self.item.id
         # Build the query structure
-        if
-        query = {
-            "filter": {
-                f"metadata.system.collections.{collection_key}": True
-            }
-        }
-        elif item_id:
+        if item_id is not None:
             query = {
                 "id": {"$eq": item_id}
             }
+        elif filters is not None:
+            query = filters.prepare().get("filter")
         else:
             raise ValueError("Either collection_key or item_id must be provided.")

@@ -226,16 +255,39 @@ class Collections:
             raise exceptions.PlatformException(f"Failed to assign item to collections: {response}")


-    def unassign(self,
+    def unassign(self,
+                 collections: List[str],
+                 dataset_id: str = None,
+                 item_id: str = None,
+                 filters: entities.Filters = None) -> bool:
         """
         Unassign an item from a collection.
-        :param item_id: ID of the item.
         :param collections: List of collection names to unassign.
+        :param dataset_id: ID of the dataset.
+        :param item_id: ID of the item.
+        :param filters: (Optional) Filters of items to unassign from the collections.
         """
-
-
-
-
+        if not isinstance(collections, list):
+            raise ValueError("collections must be a list.")
+        # build the context
+        if dataset_id is None and self._dataset is not None:
+            dataset_id = self._dataset.id
+        if item_id is None and self._item is not None:
+            item_id = self._item.id
+
+        # build the payload
+        if item_id is not None and filters is None:
+            payload = {
+                "query": {"id": {"$eq": item_id}},
+                "collections": collections,
+            }
+        elif filters is not None and item_id is None:
+            payload = {
+                "query": filters.prepare().get("filter"),
+                "collections": collections,
+            }
+        else:
+            raise ValueError("Either item_id or filters must be provided but not both.")
         success, response = self._client_api.gen_request(
             req_type="post",
             path=f"/datasets/{dataset_id}/items/collections/bulk-remove",
```
CHANGED
|
@@ -70,7 +70,7 @@ class Commands:
|
|
|
70
70
|
return entities.Command.from_json(client_api=self._client_api,
|
|
71
71
|
_json=response.json())
|
|
72
72
|
|
|
73
|
-
def wait(self, command_id, timeout=0, step=None, url=None, backoff_factor=1):
|
|
73
|
+
def wait(self, command_id, timeout=0, step=None, url=None, backoff_factor=1, iteration_callback=None):
|
|
74
74
|
"""
|
|
75
75
|
Wait for command to finish
|
|
76
76
|
|
|
@@ -84,6 +84,7 @@ class Commands:
|
|
|
84
84
|
:param int timeout: int, seconds to wait until TimeoutError is raised. if 0 - wait until done
|
|
85
85
|
:param int step: int, seconds between polling
|
|
86
86
|
:param str url: url to the command
|
|
87
|
+
:param function iteration_callback: function to call on each iteration
|
|
87
88
|
:param float backoff_factor: A backoff factor to apply between attempts after the second try
|
|
88
89
|
:return: Command object
|
|
89
90
|
"""
|
|
@@ -112,9 +113,18 @@ class Commands:
|
|
|
112
113
|
elapsed = time.time() - start
|
|
113
114
|
sleep_time = np.min([timeout - elapsed, backoff_factor * (2 ** num_tries), MAX_SLEEP_TIME])
|
|
114
115
|
num_tries += 1
|
|
115
|
-
logger.debug(
|
|
116
|
-
|
|
117
|
-
|
|
116
|
+
logger.debug(
|
|
117
|
+
"Command {!r} is running for {:.2f}[s] and now Going to sleep {:.2f}[s]".format(
|
|
118
|
+
command.id,
|
|
119
|
+
elapsed,
|
|
120
|
+
sleep_time
|
|
121
|
+
)
|
|
122
|
+
)
|
|
123
|
+
if iteration_callback is not None:
|
|
124
|
+
try:
|
|
125
|
+
iteration_callback()
|
|
126
|
+
except Exception as e:
|
|
127
|
+
logger.warning('iteration_callback failed: {}'.format(e.__str__()))
|
|
118
128
|
time.sleep(sleep_time)
|
|
119
129
|
pbar.close()
|
|
120
130
|
if command is None:
|