dtlpy 1.103.12__py3-none-any.whl → 1.105.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +1 -0
- dtlpy/__version__.py +1 -1
- dtlpy/entities/__init__.py +1 -0
- dtlpy/entities/annotation.py +12 -1
- dtlpy/entities/annotation_definitions/base_annotation_definition.py +13 -4
- dtlpy/entities/collection.py +39 -0
- dtlpy/entities/command.py +10 -5
- dtlpy/entities/compute.py +59 -6
- dtlpy/entities/dataset.py +8 -2
- dtlpy/entities/execution.py +6 -0
- dtlpy/entities/item.py +56 -2
- dtlpy/entities/ontology.py +4 -1
- dtlpy/repositories/__init__.py +1 -0
- dtlpy/repositories/collections.py +296 -0
- dtlpy/repositories/computes.py +10 -4
- dtlpy/repositories/downloader.py +2 -0
- dtlpy/repositories/integrations.py +13 -18
- dtlpy/repositories/items.py +1 -1
- dtlpy/services/api_client.py +2 -2
- {dtlpy-1.103.12.dist-info → dtlpy-1.105.6.dist-info}/METADATA +1 -1
- {dtlpy-1.103.12.dist-info → dtlpy-1.105.6.dist-info}/RECORD +29 -28
- tests/features/environment.py +34 -0
- dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
- {dtlpy-1.103.12.data → dtlpy-1.105.6.data}/scripts/dlp +0 -0
- {dtlpy-1.103.12.data → dtlpy-1.105.6.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.103.12.data → dtlpy-1.105.6.data}/scripts/dlp.py +0 -0
- {dtlpy-1.103.12.dist-info → dtlpy-1.105.6.dist-info}/LICENSE +0 -0
- {dtlpy-1.103.12.dist-info → dtlpy-1.105.6.dist-info}/WHEEL +0 -0
- {dtlpy-1.103.12.dist-info → dtlpy-1.105.6.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.103.12.dist-info → dtlpy-1.105.6.dist-info}/top_level.txt +0 -0
dtlpy/__init__.py
CHANGED
@@ -169,6 +169,7 @@ messages = repositories.Messages(client_api=client_api)
 compositions = repositories.Compositions(client_api=client_api)
 computes = repositories.Computes(client_api=client_api)
 service_drivers = repositories.ServiceDrivers(client_api=client_api)
+collections = repositories.Collections(client_api=client_api)
 
 try:
     check_sdk.check(version=__version__, client_api=client_api)
dtlpy/__version__.py
CHANGED
@@ -1 +1 @@
-version = '1.103.12'
+version = '1.105.6'
dtlpy/entities/__init__.py
CHANGED
@@ -79,3 +79,4 @@ from .compute import ClusterProvider, ComputeType, ComputeStatus, Toleration, De
     NodePool, AuthenticationIntegration, Authentication, ComputeCluster, ComputeContext, Compute, KubernetesCompute, \
     ServiceDriver
 from .gis_item import ItemGis, Layer
+from .collection import Collection
dtlpy/entities/annotation.py
CHANGED
@@ -7,6 +7,7 @@ import copy
 import attr
 import json
 import os
+import warnings
 
 from PIL import Image
 from enum import Enum
@@ -421,7 +422,7 @@ class Annotation(entities.BaseEntity):
 
     @property
     def attributes(self):
-        if self._recipe_2_attributes or self.annotation_definition.attributes == []:
+        if self._recipe_2_attributes is not None or self.annotation_definition.attributes == []:
            return self._recipe_2_attributes
         return self.annotation_definition.attributes
 
@@ -430,6 +431,11 @@ class Annotation(entities.BaseEntity):
         if isinstance(attributes, dict):
             self._recipe_2_attributes = attributes
         elif isinstance(attributes, list):
+            warnings.warn("List attributes are deprecated and will be removed in version 1.109. Use Attribute 2.0 (Dictionary) instead. "
+                          "For more details, refer to the documentation: "
+                          "https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_annotations/chapter/#set-attributes-on-annotations",
+                          DeprecationWarning,
+                          )
             self.annotation_definition.attributes = attributes
         elif attributes is None:
             if self._recipe_2_attributes:
@@ -1688,6 +1694,11 @@ class FrameAnnotation(entities.BaseEntity):
         if isinstance(attributes, dict):
             self._recipe_2_attributes = attributes
         elif isinstance(attributes, list):
+            warnings.warn("List attributes are deprecated and will be removed in version 1.109. Use Attribute 2.0 (Dictionary) instead. "
+                          "For more details, refer to the documentation: "
+                          "https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_annotations/chapter/#set-attributes-on-annotations",
+                          DeprecationWarning,
+                          )
             self.annotation_definition.attributes = attributes
         else:
             raise ValueError('Attributes must be a dictionary or a list')
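The practical effect of the attribute hunks: assigning a list to `annotation.attributes` still works but now emits a `DeprecationWarning` pointing at the Attribute 2.0 dictionary form. A minimal sketch of the new behavior (the `annotation` object and attribute values are illustrative, not from the diff):

    import warnings

    # assumes `annotation` is an existing dtlpy Annotation entity
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        annotation.attributes = ["occluded"]  # legacy list form -> DeprecationWarning
        assert any(issubclass(w.category, DeprecationWarning) for w in caught)

    annotation.attributes = {"attr-id": "occluded"}  # Attribute 2.0 dictionary form, no warning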
dtlpy/entities/annotation_definitions/base_annotation_definition.py
CHANGED
@@ -1,5 +1,6 @@
 import logging
 import numpy as np
+import warnings
 
 logger = logging.getLogger(name='dtlpy')
 
@@ -13,8 +14,12 @@ class BaseAnnotationDefinition:
         self._right = 0
         self._annotation = None
 
-        if attributes
-            attributes
+        if isinstance(attributes, list) and len(attributes) > 0:
+            warnings.warn("List attributes are deprecated and will be removed in version 1.109. Use Attribute 2.0 (Dictionary) instead."
+                          "For more details, refer to the documentation: "
+                          "https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_annotations/chapter/#set-attributes-on-annotations",
+                          DeprecationWarning,
+                          )
         self._attributes = attributes
 
     @property
@@ -23,8 +28,12 @@ class BaseAnnotationDefinition:
 
     @attributes.setter
     def attributes(self, v):
-        if
-
+        if isinstance(v, list):
+            warnings.warn("List attributes are deprecated and will be removed in version 1.109. Use Attribute 2.0 (Dictionary) instead. "
+                          "For more details, refer to the documentation: "
+                          "https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_annotations/chapter/#set-attributes-on-annotations",
+                          DeprecationWarning,
+                          )
         self._attributes = v
 
     @property
     def top(self):
dtlpy/entities/collection.py
ADDED
@@ -0,0 +1,39 @@
+from .. import entities
+from ..services.api_client import ApiClient
+import attr
+
+@attr.s
+class Collection(entities.BaseEntity):
+    """
+    Represents a collection in the dataset.
+    """
+
+    # sdk
+    _client_api = attr.ib(type=ApiClient, repr=False)
+
+    key = attr.ib(type=str)
+    name = attr.ib(type=str)
+
+    @classmethod
+    def from_json(cls, _json, client_api, is_fetched=True):
+        """
+        Create a single Collection entity from the dataset JSON.
+
+        :param _json: A dictionary containing collection data in the format:
+                      { "metadata.system.collections.c0": {"name": "Justice League"} }
+        :param client_api: The client API instance.
+        :param is_fetched: Whether the entity was fetched from the platform.
+        :return: A single Collection entity.
+        """
+        full_key, value = next(iter(_json.items()))
+        # Strip the prefix
+        key = full_key.replace("metadata.system.collections.", "")
+        name = value.get("name")
+
+        inst = cls(
+            key=key,
+            name=name,
+            client_api=client_api,
+        )
+        inst.is_fetched = is_fetched
+        return inst
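A quick illustrative use of the new entity's `from_json`, with the flattened key and name taken from the docstring example above (the module-level `dl.client_api` is an assumption about the ambient SDK setup):

    import dtlpy as dl

    _json = {"metadata.system.collections.c0": {"name": "Justice League"}}
    collection = dl.entities.Collection.from_json(_json=_json, client_api=dl.client_api)
    print(collection.key, collection.name)  # -> c0 Justice League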
dtlpy/entities/command.py
CHANGED
@@ -18,6 +18,8 @@ class CommandsStatus(str, Enum):
     SUCCESS = 'success'
     FAILED = 'failed'
     TIMEOUT = 'timeout'
+    CLEANING_UP = 'cleaning-up'
+    ON_ERROR = 'on-error'
 
 
 @attr.s
@@ -135,11 +137,14 @@ class Command(entities.BaseEntity):
         :return: True if command still in progress
         :rtype: bool
         """
-
-
-
-
-
+        if self.status not in {status for status in entities.CommandsStatus}:
+            raise ValueError('status is not a valid CommandsStatus')
+        return self.status not in [entities.CommandsStatus.SUCCESS,
+                                   entities.CommandsStatus.FAILED,
+                                   entities.CommandsStatus.TIMEOUT,
+                                   entities.CommandsStatus.CANCELED,
+                                   entities.CommandsStatus.ABORTED
+                                   ]
 
     def wait(self, timeout=0, step=None, backoff_factor=1):
         """
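With the two new members, `in_progress()` first validates the status and then tests it against the terminal set, so the new `cleaning-up` and `on-error` states keep `wait()` polling. A small sketch of that distinction:

    from dtlpy import entities

    terminal = {entities.CommandsStatus.SUCCESS, entities.CommandsStatus.FAILED,
                entities.CommandsStatus.TIMEOUT, entities.CommandsStatus.CANCELED,
                entities.CommandsStatus.ABORTED}

    # the two new states are non-terminal, so a command reporting them is still "in progress"
    assert entities.CommandsStatus.CLEANING_UP not in terminal
    assert entities.CommandsStatus.ON_ERROR not in terminal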
dtlpy/entities/compute.py
CHANGED
@@ -230,10 +230,11 @@ class ComputeCluster:
             devops_output['config']['kubernetesVersion'],
             ClusterProvider(devops_output['config']['provider']),
             node_pools,
-
-            Authentication(AuthenticationIntegration(integration.id,integration.type))
+            {},
+            Authentication(AuthenticationIntegration(integration.id, integration.type))
         )
 
+
 class ComputeContext:
     def __init__(self, labels: List[str], org: str, project: Optional[str] = None):
         self.labels = labels
@@ -376,11 +377,35 @@ class KubernetesCompute(Compute):
 
 
 class ServiceDriver:
-    def __init__(
+    def __init__(
+            self,
+            name: str,
+            context: ComputeContext,
+            compute_id: str,
+            client_api: ApiClient,
+            type: ComputeType = None,
+            created_at: str = None,
+            updated_at: str = None,
+            namespace: str = None,
+            metadata: Dict = None,
+            url: str = None,
+            archived: bool = None,
+            id: str = None,
+            is_cache_available: bool = None
+    ):
         self.name = name
         self.context = context
         self.compute_id = compute_id
         self.client_api = client_api
+        self.type = type or ComputeType.KUBERNETES
+        self.created_at = created_at
+        self.updated_at = updated_at
+        self.namespace = namespace
+        self.metadata = metadata
+        self.url = url
+        self.archived = archived
+        self.id = id
+        self.is_cache_available = is_cache_available
 
     @classmethod
     def from_json(cls, _json, client_api: ApiClient):
@@ -388,12 +413,40 @@ class ServiceDriver:
             name=_json.get('name'),
             context=ComputeContext.from_json(_json.get('context', dict())),
             compute_id=_json.get('computeId'),
-            client_api=client_api
+            client_api=client_api,
+            type=_json.get('type', None),
+            created_at=_json.get('createdAt', None),
+            updated_at=_json.get('updatedAt', None),
+            namespace=_json.get('namespace', None),
+            metadata=_json.get('metadata', None),
+            url=_json.get('url', None),
+            archived=_json.get('archived', None),
+            id=_json.get('id', None),
+            is_cache_available=_json.get('isCacheAvailable', None)
         )
 
     def to_json(self):
-
+        _json = {
             'name': self.name,
             'context': self.context.to_json(),
-            'computeId': self.compute_id
+            'computeId': self.compute_id,
+            'type': self.type,
         }
+        if self.created_at is not None:
+            _json['createdAt'] = self.namespace
+        if self.updated_at is not None:
+            _json['updatedAt'] = self.updated_at
+        if self.namespace is not None:
+            _json['namespace'] = self.namespace
+        if self.metadata is not None:
+            _json['metadata'] = self.metadata
+        if self.url is not None:
+            _json['url'] = self.url
+        if self.archived is not None:
+            _json['archived'] = self.archived
+        if self.id is not None:
+            _json['id'] = self.id
+        if self.is_cache_available is not None:
+            _json['isCacheAvailable'] = self.is_cache_available
+
+        return _json
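A hedged round-trip sketch of the expanded `ServiceDriver` serialization (field values are made up, and it assumes `ComputeContext.from_json` accepts the labels/org keys shown; note that, as diffed, `to_json` fills `'createdAt'` from `self.namespace`, so `created_at` is left unset here):

    from dtlpy.entities.compute import ServiceDriver
    from dtlpy.services.api_client import client as client_api

    _json = {
        'name': 'my-driver',                        # illustrative values
        'context': {'labels': [], 'org': 'org-id'},
        'computeId': 'compute-id',
        'namespace': 'default',
        'isCacheAvailable': True,
    }
    driver = ServiceDriver.from_json(_json, client_api=client_api)
    out = driver.to_json()
    # optional fields are emitted only when set; 'type' defaults to ComputeType.KUBERNETES
    assert out['namespace'] == 'default' and out['isCacheAvailable'] is True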
dtlpy/entities/dataset.py
CHANGED
@@ -284,7 +284,7 @@ class Dataset(entities.BaseEntity):
     def set_repositories(self):
         reps = namedtuple('repositories',
                           field_names=['items', 'recipes', 'datasets', 'assignments', 'tasks', 'annotations',
-                                       'ontologies', 'features', 'settings', 'schema'])
+                                       'ontologies', 'features', 'settings', 'schema', 'collections'])
         if self._project is None:
             datasets = repositories.Datasets(client_api=self._client_api, project=self._project)
         else:
@@ -300,7 +300,8 @@ class Dataset(entities.BaseEntity):
             ontologies=repositories.Ontologies(client_api=self._client_api, dataset=self),
             features=repositories.Features(client_api=self._client_api, project=self._project, dataset=self),
             settings=repositories.Settings(client_api=self._client_api, dataset=self),
-            schema=repositories.Schema(client_api=self._client_api, dataset=self)
+            schema=repositories.Schema(client_api=self._client_api, dataset=self),
+            collections=repositories.Collections(client_api=self._client_api, dataset=self)
         )
 
     @property
@@ -348,6 +349,11 @@ class Dataset(entities.BaseEntity):
         assert isinstance(self._repositories.features, repositories.Features)
         return self._repositories.features
 
+    @property
+    def collections(self):
+        assert isinstance(self._repositories.collections, repositories.Collections)
+        return self._repositories.collections
+
     @property
     def schema(self):
         assert isinstance(self._repositories.schema, repositories.Schema)
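With the repository wired into `set_repositories`, collections are reachable straight from a dataset entity. A usage sketch (project and dataset names are placeholders):

    import dtlpy as dl

    dataset = dl.projects.get(project_name='my-project').datasets.get(dataset_name='my-dataset')
    dataset.collections.create(name='batch-a')  # validates uniqueness and the 10-collection cap
    print([c['name'] for c in dataset.collections.list_all_collections()])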
dtlpy/entities/execution.py
CHANGED
@@ -63,6 +63,8 @@ class Execution(entities.BaseEntity):
     # optional
     pipeline = attr.ib(type=dict, default=None, repr=False)
     model = attr.ib(type=dict, default=None, repr=False)
+    app = attr.ib(default=None)
+    driver_id = attr.ib(default=None)
 
     ################
     # repositories #
@@ -189,6 +191,8 @@ class Execution(entities.BaseEntity):
             pipeline=_json.get('pipeline', None),
             model=_json.get('model', None),
             package_revision=_json.get('packageRevision', None),
+            app=_json.get('app', None),
+            driver_id=_json.get('driverId', None)
         )
         inst.is_fetched = is_fetched
         return inst
@@ -226,6 +230,7 @@ class Execution(entities.BaseEntity):
                 attr.fields(Execution).pipeline,
                 attr.fields(Execution).model,
                 attr.fields(Execution).package_revision,
+                attr.fields(Execution).driver_id,
             )
         )
 
@@ -247,6 +252,7 @@ class Execution(entities.BaseEntity):
         _json['feedbackQueue'] = self.feedback_queue
         _json['syncReplyTo '] = self.sync_reply_to
         _json['packageRevision'] = self.package_revision
+        _json['driverId'] = self.driver_id
 
         if self.pipeline:
             _json['pipeline'] = self.pipeline
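The two new attributes surface on fetched executions, and `driver_id` also round-trips through `to_json` as `driverId`. For instance (the execution id is a placeholder):

    import dtlpy as dl

    execution = dl.executions.get(execution_id='execution-id')
    print(execution.app, execution.driver_id)  # populated from 'app' / 'driverId' when present
    assert execution.to_json().get('driverId') == execution.driver_id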
dtlpy/entities/item.py
CHANGED
@@ -12,6 +12,7 @@ from .annotation import ViewAnnotationOptions, ExportVersion
 from ..services.api_client import ApiClient
 from ..services.api_client import client as client_api
 import json
+from typing import List
 import requests
 
 logger = logging.getLogger(name='dtlpy')
@@ -223,7 +224,7 @@ class Item(entities.BaseEntity):
     def set_repositories(self):
         reps = namedtuple('repositories',
                           field_names=['annotations', 'datasets', 'items', 'codebases', 'artifacts', 'modalities',
-                                       'features', 'assignments', 'tasks', 'resource_executions'])
+                                       'features', 'assignments', 'tasks', 'resource_executions', 'collections'])
         reps.__new__.__defaults__ = (None, None, None, None, None, None, None, None, None)
 
         if self._dataset is None:
@@ -270,7 +271,8 @@ class Item(entities.BaseEntity):
                 client_api=self._client_api,
                 project=self._project,
                 resource=self
-            )
+            ),
+            collections=repositories.Collections(client_api=self._client_api, item=self, dataset=self._dataset)
         )
         return r
 
@@ -313,6 +315,11 @@ class Item(entities.BaseEntity):
     def features(self):
         assert isinstance(self._repositories.features, repositories.Features)
         return self._repositories.features
+
+    @property
+    def collections(self):
+        assert isinstance(self._repositories.collections, repositories.Collections)
+        return self._repositories.collections
 
     ##############
     # Properties #
@@ -770,6 +777,53 @@ class Item(entities.BaseEntity):
             if tags.get(subset) is True:
                 return subset
         return None
+
+    def assign_collection(self, collections: List[str]) -> bool:
+        """
+        Assign this item to one or more collections.
+
+        :param collections: List of collection names to assign the item to.
+        :return: True if the assignment was successful, otherwise False.
+        """
+        return self.collections.assign(dataset_id=self.dataset_id, collections=collections, item_id=self.id,)
+
+    def unassign_collection(self, collections: List[str]) -> bool:
+        """
+        Unassign this item from one or more collections.
+
+        :param collections: List of collection names to unassign the item from.
+        :return: True if the unassignment was successful, otherwise False.
+        """
+        return self.collections.unassign(dataset_id=self.dataset_id, item_id=self.id, collections=collections)
+
+    def list_collections(self) -> List[dict]:
+        """
+        List all collections associated with this item.
+
+        :return: A list of dictionaries containing collection keys and their respective names.
+                 Each dictionary has the structure: {"key": <collection_key>, "name": <collection_name>}.
+        """
+        collections = self.metadata.get("system", {}).get("collections", {})
+        if not isinstance(collections, dict):
+            # Ensure collections is a dictionary
+            return []
+
+        # Retrieve collection names by their keys
+        return [
+            {"key": key, "name": self.collections.get_name_by_key(key)}
+            for key in collections.keys()
+        ]
+
+    def list_missing_collections(self) -> List[str]:
+        """
+        List all items in the dataset that are not assigned to any collection.
+
+        :return: A list of item IDs that are not part of any collection.
+        """
+        filters = entities.Filters()
+        filters.add(field='metadata.system.collections', values=None)
+        filters.add(field='datasetId', values=self._dataset.id)
+        return self._dataset.items.list(filters=filters)
 
 class ModalityTypeEnum(str, Enum):
     """
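The new item-level helpers delegate to the dataset-scoped Collections repository. A usage sketch (the item id and collection name are placeholders):

    import dtlpy as dl

    item = dl.items.get(item_id='item-id')
    item.assign_collection(collections=['batch-a'])    # bulk-add scoped to this single item
    # list_collections reads the locally cached metadata, so re-fetch to see the change
    item = dl.items.get(item_id='item-id')
    print(item.list_collections())                     # e.g. [{"key": "c0", "name": "batch-a"}]
    item.unassign_collection(collections=['batch-a'])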
dtlpy/entities/ontology.py
CHANGED
@@ -766,7 +766,7 @@ class Ontology(entities.BaseEntity):
         # TODO: Add support for import from ontology entity in the Future
         if not self._use_attributes_2:
             raise ValueError("This method is only supported for attributes 2 mode!")
-        new_ontology = self.from_json(_json=ontology_json, client_api=self._client_api
+        new_ontology = self.from_json(_json=ontology_json, client_api=self._client_api)
 
         # Update 'labels' and 'attributes'
         self.labels = new_ontology.labels
@@ -794,6 +794,9 @@ class Ontology(entities.BaseEntity):
             attribute_range=attribute_range
         )
 
+        # Get remote updated 'attributes'
+        self.metadata["attributes"] = self.ontologies.get(ontology_id=self.id).attributes
+
         # Update 'instance map' and 'color map'
         self._instance_map = new_ontology.instance_map
         self._color_map = new_ontology.color_map
dtlpy/repositories/collections.py
ADDED
@@ -0,0 +1,296 @@
+from venv import logger
+from dtlpy import entities, exceptions, repositories
+from dtlpy.entities.dataset import Dataset
+from dtlpy.entities.filters import FiltersMethod
+from dtlpy.services.api_client import ApiClient
+from typing import List
+
+class Collections:
+    def __init__(self,
+                 client_api: ApiClient,
+                 item: entities.Item = None,
+                 dataset: entities.Dataset = None
+                 ):
+        self._client_api = client_api
+        self._dataset = dataset
+        self._item = item
+
+    def create(self, name: str) -> entities.Collection:
+        """
+        Creates a new collection in the dataset.
+
+        :param name: The name of the new collection.
+        :return: The created collection details.
+        """
+        dataset_id = self._dataset.id
+        self.validate_max_collections()
+        self.validate_collection_name(name)
+        payload = {"name": name}
+        success, response = self._client_api.gen_request(
+            req_type="post", path=f"/datasets/{dataset_id}/items/collections", json_req=payload
+        )
+        if success:
+            collection_json = self._single_collection(data=response.json(), name=name)
+            return entities.Collection.from_json(client_api=self._client_api, _json=collection_json)
+        else:
+            raise exceptions.PlatformException(response)
+
+    def update(self, collection_name: str, new_name: str) -> entities.Collection:
+        """
+        Updates the name of an existing collection.
+
+        :param collection_id: The ID of the collection to update.
+        :param new_name: The new name for the collection.
+        :return: The updated collection details.
+        """
+        dataset_id = self._dataset.id
+        self.validate_collection_name(new_name)
+        payload = {"name": new_name}
+        success, response = self._client_api.gen_request(
+            req_type="patch", path=f"/datasets/{dataset_id}/items/collections/{collection_name}", json_req=payload
+        )
+        if success:
+            collection_json = self._single_collection(data=response.json(), name=new_name)
+            return entities.Collection.from_json(client_api=self._client_api, _json=collection_json)
+        else:
+            raise exceptions.PlatformException(response)
+
+    def delete(self, collection_name: str) -> bool:
+        """
+        Deletes a collection from the dataset.
+
+        :param collection_name: The name of the collection to delete.
+        """
+        dataset_id = self._dataset.id
+        success, response = self._client_api.gen_request(
+            req_type="delete", path=f"/datasets/{dataset_id}/items/collections/{collection_name}"
+        )
+        if success:
+            # Wait for the split operation to complete
+            command = entities.Command.from_json(_json=response.json(),
+                                                 client_api=self._client_api)
+            command.wait()
+            return True
+        else:
+            raise exceptions.PlatformException(response)
+
+    def clone(self, collection_name: str) -> dict:
+        """
+        Clones an existing collection, creating a new one with a unique name.
+
+        :param collection_name: The name of the collection to clone.
+        :return: The cloned collection details as a dictionary.
+        """
+        self.validate_max_collections()
+        collections = self.list_all_collections()
+        original_collection = next((c for c in collections if c["name"] == collection_name), None)
+
+        if not original_collection:
+            raise ValueError(f"Collection with name '{collection_name}' not found.")
+
+        source_name = original_collection["name"]
+        num = 0
+        clone_name = ""
+        while True:
+            num += 1
+            clone_name = f"{source_name}-clone-{num}"
+            if not any(c["name"] == clone_name for c in collections):  # Use c["name"] for comparison
+                break
+
+        # Create the cloned collection
+        cloned_collection = self.create(name=clone_name)
+        self.assign(dataset_id=self._dataset.id, collections=[cloned_collection.name], collection_key=original_collection['key'])
+        return cloned_collection
+
+
+    def list_all_collections(self) -> entities.Collection:
+        """
+        Retrieves all collections in the dataset.
+
+        :return: A list of collections in the dataset.
+        """
+        dataset_id = self._dataset.id
+        success, response = self._client_api.gen_request(
+            req_type="GET", path=f"/datasets/{dataset_id}/items/collections"
+        )
+        if success:
+            data = response.json()
+            return self._list_collections(data)
+        else:
+            raise exceptions.PlatformException(response)
+
+    def validate_collection_name(self, name: str):
+        """
+        Validate that the collection name is unique.
+
+        :param name: The name of the collection to validate.
+        :raises ValueError: If a collection with the same name already exists.
+        """
+        collections = self.list_all_collections()
+        if any(c["name"] == name for c in collections):
+            raise ValueError(f"A collection with the name '{name}' already exists.")
+
+    def validate_max_collections(self) -> None:
+        """
+        Validates that the dataset has not exceeded the maximum allowed collections.
+
+        :raises ValueError: If the dataset has 10 or more collections.
+        """
+        collections = self.list_all_collections()
+        if len(collections) >= 10:
+            raise ValueError("The dataset already has the maximum number of collections (10).")
+
+    def list_unassigned_items(self) -> list:
+        """
+        List unassigned items in a dataset (items where all collection fields are false).
+
+        :return: List of unassigned item IDs
+        :rtype: list
+        """
+        filters = entities.Filters(method=FiltersMethod.AND)  # Use AND method for all conditions
+        collection_fields = [
+            "collections0",
+            "collections1",
+            "collections2",
+            "collections3",
+            "collections4",
+            "collections5",
+            "collections6",
+            "collections7",
+            "collections8",
+            "collections9",
+        ]
+
+        # Add each field to the filter with a value of False
+        for field in collection_fields:
+            filters.add(field=field, values=False, method=FiltersMethod.AND)
+
+        missing_ids = []
+        pages = self._dataset.items.list(filters=filters)
+        for page in pages:
+            for item in page:
+                # Items that pass filters mean all collections are false
+                missing_ids.append(item.id)
+
+        return missing_ids
+
+    def assign(
+            self,
+            dataset_id: str,
+            collections: List[str],
+            item_id: str = None,
+            collection_key: str = None
+    ) -> bool:
+        """
+        Assign an item to a collection. Creates the collection if it does not exist.
+
+        :param dataset_id: ID of the dataset.
+        :param collections: List of the collections to assign the item to.
+        :param item_id: (Optional) ID of the item to assign. If not provided, all items in the dataset will be updated.
+        :param collection_key: (Optional) Key for the bulk assignment. If not provided, no specific metadata will be updated.
+        :return: True if the assignment was successful, otherwise raises an exception.
+        """
+        # Build the query structure
+        if collection_key:
+            query = {
+                "filter": {
+                    f"metadata.system.collections.{collection_key}": True
+                }
+            }
+        elif item_id:
+            query = {
+                "id": {"$eq": item_id}
+            }
+        else:
+            raise ValueError("Either collection_key or item_id must be provided.")
+
+        # Create the payload
+        payload = {
+            "query": query,
+            "collections": collections,
+        }
+
+        # Make the API request to assign the item
+        success, response = self._client_api.gen_request(
+            req_type="post",
+            path=f"/datasets/{dataset_id}/items/collections/bulk-add",
+            json_req=payload,
+        )
+
+        if success:
+            # Wait for the operation to complete
+            command = entities.Command.from_json(_json=response.json(), client_api=self._client_api)
+            command.wait()
+            return True
+        else:
+            raise exceptions.PlatformException(f"Failed to assign item to collections: {response}")
+
+
+    def unassign(self, dataset_id: str, item_id: str, collections: List[str]) -> bool:
+        """
+        Unassign an item from a collection.
+        :param item_id: ID of the item.
+        :param collections: List of collection names to unassign.
+        """
+        payload = {
+            "query": {"id": {"$eq": item_id}},
+            "collections": collections,
+        }
+        success, response = self._client_api.gen_request(
+            req_type="post",
+            path=f"/datasets/{dataset_id}/items/collections/bulk-remove",
+            json_req=payload,
+        )
+        if success:
+            # Wait for the split operation to complete
+            command = entities.Command.from_json(_json=response.json(),
+                                                 client_api=self._client_api)
+            command.wait()
+            return True
+        else:
+            raise exceptions.PlatformException(response)
+
+    def _single_collection(sef, data: dict, name: str):
+        """
+        Retrieves the key-value pair from the dictionary where the collection's name matches the given name.
+
+        :param data: A dictionary containing collection data in the format:
+                     { "metadata.system.collections.c0": {"name": "Justice League"}, ... }
+        :param name: The name of the collection to find.
+        :return: The key-value pair where the name matches, or None if not found.
+        """
+        for key, value in data.items():
+            if value.get("name") == name:
+                return {key: value}
+        return None
+
+    def _list_collections(self, data: dict):
+        """
+        Create a list of Collection entities from the dataset JSON.
+
+        :param data: The flat JSON containing collection data in the format:
+                     { "metadata.system.collections.c0": {"name": "Justice League"}, ... }
+        :return: A list of Collection entities.
+        """
+        collections = []
+        for full_key, value in data.items():
+            if "metadata.system.collections" in full_key:
+                # Strip the prefix
+                key = full_key.replace("metadata.system.collections.", "")
+                collection_name = value.get("name")
+                collections.append({"key": key, "name": collection_name})
+        return collections
+
+    def get_name_by_key(self, key: str) -> str:
+        """
+        Get the name of a collection by its key.
+
+        :param key: The key of the collection (e.g., 'c0', 'c1').
+        :return: The name of the collection if it exists; otherwise, an empty string.
+        """
+        # Assuming collections is a list of dictionaries
+        collections = self.list_all_collections()
+        for collection in collections:
+            if collection.get("key") == key:
+                return collection.get("name", "")
+        return ""
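Taken together, the repository wraps the new `/datasets/{id}/items/collections` endpoints, with client-side checks for name uniqueness and the 10-collection cap. A dataset-level sketch (ids and names are placeholders):

    import dtlpy as dl

    collections = dl.datasets.get(dataset_id='dataset-id').collections

    collections.create(name='reviewed')                                   # POST  .../items/collections
    collections.update(collection_name='reviewed', new_name='approved')   # PATCH .../items/collections/reviewed
    clone = collections.clone(collection_name='approved')                 # creates 'approved-clone-1' and bulk-assigns
    print(collections.list_unassigned_items())                            # item ids with all collections0..9 false
    collections.delete(collection_name=clone.name)                        # DELETE, then waits on the returned command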
dtlpy/repositories/computes.py
CHANGED
@@ -8,6 +8,7 @@ from typing import List, Optional, Dict
 from ..entities import ComputeCluster, ComputeContext, ComputeType, Project
 from ..entities.integration import IntegrationType
 
+
 class Computes:
 
     def __init__(self, client_api: ApiClient):
@@ -44,7 +45,8 @@ class Computes:
             type: entities.ComputeType = entities.ComputeType.KUBERNETES,
             is_global: Optional[bool] = False,
             features: Optional[Dict] = None,
-            wait=True
+            wait=True,
+            status: entities.ComputeStatus = None
     ):
         """
         Create a new compute
@@ -57,6 +59,7 @@ class Computes:
         :param is_global: Is global
         :param features: Features
         :param wait: Wait for compute creation
+        :param status: Compute status
         :return: Compute
         """
 
@@ -67,7 +70,8 @@ class Computes:
             'global': is_global,
             'features': features,
             'shared_contexts': [sc.to_json() for sc in shared_contexts],
-            'cluster': cluster.to_json()
+            'cluster': cluster.to_json(),
+            'status': status
         }
 
         # request
@@ -86,7 +90,7 @@ class Computes:
         )
 
         if wait:
-            command_id = compute.metadata.get('system', {}).get('commands', {}).get('create',
+            command_id = compute.metadata.get('system', {}).get('commands', {}).get('create', None)
             if command_id is not None:
                 command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
                 command.wait()
@@ -200,7 +204,8 @@ class Computes:
             ComputeContext([], org_id, project_id),
             [],
             cluster,
-            ComputeType.KUBERNETES
+            ComputeType.KUBERNETES,
+            status=config['config'].get('status', None))
         return compute
 
     def create_from_config_file(self, config_file_path, org_id, project_name: Optional[str] = None):
@@ -215,6 +220,7 @@ class Computes:
         compute = self.setup_compute_cluster(config, integration, org_id, project)
         return compute
 
+
 class ServiceDrivers:
 
     def __init__(self, client_api: ApiClient):
dtlpy/repositories/downloader.py
CHANGED
@@ -674,6 +674,8 @@ class Downloader:
                 stream=True,
                 dataset_id=item.dataset_id)
             if not result:
+                if os.path.isfile(local_filepath + '.download'):
+                    os.remove(local_filepath + '.download')
                 raise PlatformException(response)
             else:
                 _, ext = os.path.splitext(item.metadata['system']['shebang']['linkInfo']['ref'].split('?')[0])
dtlpy/repositories/integrations.py
CHANGED
@@ -114,6 +114,8 @@ class Integrations:
         aws-cross - {}
         gcp-cross - {}
         gcp-workload-identity-federation - {"secret": "", "content": "{}", "clientId": ""}
+        private-registry (ECR) - {"name": "", "spec": {"accessKeyId": "", "secretAccessKey": "", "account": "", "region": ""}}
+        private-registry (GAR) - {"name": "", "spec": {"password": ""}} (can use generate_gar_options to generate the options)
 
         **Prerequisites**: You must be an *owner* in the organization.
 
@@ -129,7 +131,7 @@ class Integrations:
         .. code-block:: python
 
             project.integrations.create(integrations_type=dl.IntegrationType.S3,
-                                        name='
+                                        name='S3Integration',
                                         options={key: "Access key ID", secret: "Secret access key"})
         """
 
@@ -144,7 +146,9 @@ class Integrations:
             organization_id = self.org.id
 
         url_path = '/orgs/{}/integrations'.format(organization_id)
-        payload = {"type": integrations_type.value if isinstance(integrations_type,
+        payload = {"type": integrations_type.value if isinstance(integrations_type,
+                                                                 entities.IntegrationType) else integrations_type,
+                   'name': name, 'options': options}
         if metadata is not None:
             payload['metadata'] = metadata
         success, response = self._client_api.gen_request(req_type='post',
@@ -300,21 +304,7 @@ class Integrations:
         available_integrations = miscellaneous.List(response.json())
         return available_integrations
 
-    def
-        password = self.__create_gar_password(service_account, location)
-        return self.create(
-            integrations_type='private-registry',
-            name='gar-1',
-            metadata={"provider": "gcp"},
-            options={
-                "name": "_json_key",
-                "spec": {
-                    "password": password
-                }
-            }
-        )
-
-    def __create_gar_password(self, service_account: str, location: str) -> str:
+    def generate_gar_options(self, service_account: str, location: str) -> dict:
         """
         Generates a Google Artifact Registry JSON configuration and returns it as a base64-encoded string.
 
@@ -348,4 +338,9 @@ class Integrations:
             }
         }
 
-        return
+        return {
+            "name": "_json_key",
+            "spec": {
+                "password": str(base64.b64encode(bytes(json.dumps(encoded_pass), 'utf-8')))[2:-1]
+            }
+        }
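The private helper that silently created a hard-coded 'gar-1' integration is gone; `generate_gar_options` now only builds the options dictionary, and creating the integration is an explicit second step. A sketch (assuming an organization-scoped integrations repository, since `create()` above resolves `self.org.id`; the service-account JSON and location are placeholders):

    import dtlpy as dl

    integrations = dl.organizations.get(organization_name='my-org').integrations
    options = integrations.generate_gar_options(
        service_account='{"type": "service_account", ...}',  # raw service-account JSON string
        location='us-central1',
    )
    integrations.create(integrations_type='private-registry',
                        name='gar-registry',                 # hypothetical name
                        metadata={"provider": "gcp"},        # as in the removed helper
                        options=options)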
dtlpy/repositories/items.py
CHANGED
@@ -271,7 +271,7 @@ class Items:
             filters.pop(field='hidden')
             if is_dir:
                 filters.add(field='type', values='dir')
-
+                filters.recursive = False
             filters.add(field='filename', values=filepath)
             paged_entity = self.list(filters=filters)
             if len(paged_entity.items) == 0:
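A hedged reading of this one-liner: when fetching a directory by path, the filename filter now matches only the directory entry itself instead of expanding into nested paths, so `get` resolves the single folder item. For example:

    import dtlpy as dl

    dataset = dl.datasets.get(dataset_id='dataset-id')           # placeholder id
    folder = dataset.items.get(filepath='/train', is_dir=True)   # resolves exactly the '/train' directory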
dtlpy/services/api_client.py
CHANGED
@@ -45,7 +45,7 @@ threadLock = threading.Lock()
 
 
 def format_message(message):
-    if message:
+    if message and isinstance(message, str):
         return message.replace('\\n', '\n')
     return message
 
@@ -1476,7 +1476,7 @@ class ApiClient:
         msg += '[Response <{val}>]'.format(val=resp.status_code)
         if hasattr(resp, 'reason'):
             msg += '[Reason: {val}]'.format(val=resp.reason)
-        if hasattr(resp, 'text'):
+        if hasattr(resp, 'text') and isinstance(resp.text, str):
             msg += '[Text: {val}]'.format(val=format_message(resp.text))
 
         request_id = resp.headers.get('x-request-id', 'na')
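Both guards are defensive type checks: a non-string payload (bytes, a mock, None) now passes through instead of raising `AttributeError` on `.replace`. A sketch of `format_message`'s new behavior:

    from dtlpy.services.api_client import format_message

    assert format_message('line\\nbreak') == 'line\nbreak'  # strings are unescaped as before
    assert format_message(b'raw bytes') == b'raw bytes'     # non-strings now pass through untouched
    assert format_message(None) is None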
{dtlpy-1.103.12.dist-info → dtlpy-1.105.6.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
-dtlpy/__init__.py,sha256=
-dtlpy/__version__.py,sha256=
+dtlpy/__init__.py,sha256=GjtFPFltVerHF1m6ePaVp5oUWcg7yavd3aNhknTip9U,20961
+dtlpy/__version__.py,sha256=RkZdRrLtjnqMBrfIPP4HgsEaxdXnqqxv1qVlCI1mifo,20
 dtlpy/exceptions.py,sha256=EQCKs3pwhwZhgMByQN3D3LpWpdxwcKPEEt-bIaDwURM,2871
 dtlpy/new_instance.py,sha256=u_c6JtgqsKCr7TU24-g7_CaST9ghqamMhM4Z0Zxt50w,10121
 dtlpy/assets/__init__.py,sha256=D_hAa6NM8Zoy32sF_9b7m0b7I-BQEyBFg8-9Tg2WOeo,976
@@ -13,7 +13,6 @@ dtlpy/assets/package_catalog.json,sha256=bN4aHR5shJ3_wrJioO2BesaT2g8dQrrFUWk6Zkt
 dtlpy/assets/package_gitignore,sha256=IjLaxcQldWa6q7ZUajeFEKetiBCaSX4bqEY0Ayye7sE,4405
 dtlpy/assets/project_dataset_recipe_ontology.png,sha256=PKwLXvL289IoKJukoVlSzKKLfh08AIxsUlU2s88U_20,10807
 dtlpy/assets/voc_annotation_template.xml,sha256=exQVEGh9P8UKOvU_XtGlzpHC9TIXmSifAeFSpJ_DgfY,742
-dtlpy/assets/__pycache__/__init__.cpython-310.pyc,sha256=7Kssmev_GFPccgJ5KEospGRxf3dv_V1Dtipl8604vX0,1113
 dtlpy/assets/code_server/config.yaml,sha256=-2G8_dMvr5kVKXtixwj86l_lTYRlK5LKziDNoEtwEM0,48
 dtlpy/assets/code_server/installation.sh,sha256=mOouT6A3IhU-uan8rN3abDq5tCWUu2UOX7D_fEf-U6U,838
 dtlpy/assets/code_server/launch.json,sha256=FbZg1c1kAiDVDVENcNPj6dIxVyO1LqYlCStCo73JcjA,331
@@ -44,9 +43,9 @@ dtlpy/dlp/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
 dtlpy/dlp/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
 dtlpy/dlp/dlp.py,sha256=YjNBjeCDTXJ7tj8qdiGZ8lFb8DtPZl-FvViyjxt9xF8,4278
 dtlpy/dlp/parser.py,sha256=p-TFaiAU2c3QkI97TXzL2LDR3Eq0hGDFrTc9J2jWLh4,30551
-dtlpy/entities/__init__.py,sha256=
+dtlpy/entities/__init__.py,sha256=HQ2p5IWmBqT5oG908poiDsSsQOnESsV_Y2rHASEHdcs,4943
 dtlpy/entities/analytic.py,sha256=5MpYDKPVsZ1MIy20Ju515RWed6P667j4TLxsan2gyNM,11925
-dtlpy/entities/annotation.py,sha256=
+dtlpy/entities/annotation.py,sha256=L1g1OyyOFC60Eo5PWnp8KPNUGPqNCQjlnAvLbT96QQI,68006
 dtlpy/entities/annotation_collection.py,sha256=CEYSBHhhDkC0VJdHsBSrA6TgdKGMcKeI3tFM40UJwS8,29838
 dtlpy/entities/app.py,sha256=dVd87-mP22NWvec5nqA5VjZ8Qk3aJlgUcloIAAOAPUw,6968
 dtlpy/entities/app_module.py,sha256=0UiAbBX1q8iEImi3nY7ySWZZHoRRwu0qUXmyXmgVAc4,3645
@@ -55,25 +54,26 @@ dtlpy/entities/assignment.py,sha256=Dc1QcfVf67GGcmDDi4ubESDuPkSgjXqdqjTBQ31faUM,
 dtlpy/entities/base_entity.py,sha256=i83KrtAz6dX4t8JEiUimLI5ZRrN0VnoUWKG2Zz49N5w,6518
 dtlpy/entities/bot.py,sha256=is3NUCnPg56HSjsHIvFcVkymValMqDV0uHRDC1Ib-ds,3819
 dtlpy/entities/codebase.py,sha256=pwRkAq2GV0wvmzshg89IAmE-0I2Wsy_-QNOu8OV8uqc,8999
-dtlpy/entities/
-dtlpy/entities/
-dtlpy/entities/
+dtlpy/entities/collection.py,sha256=FPPPfIxOsBG1ujORPJVq8uXyF8vhIqC6N4EiI9SJzl0,1160
+dtlpy/entities/command.py,sha256=FtfsO6kQSZqKn-Uo8n2ryGOB01Fgr-g5ewfMCtRMTfw,5247
+dtlpy/entities/compute.py,sha256=LcrFnFwIm5HQYvtmzvZ2Eh6hvdkefTByJ9YmwqED57c,14641
+dtlpy/entities/dataset.py,sha256=GedPdhsQlWQo5H9CLsTZCqnTsyV1ppJUCwG4hukGBy0,50672
 dtlpy/entities/directory_tree.py,sha256=Rni6pLSWytR6yeUPgEdCCRfTg_cqLOdUc9uCqz9KT-Q,1186
 dtlpy/entities/dpk.py,sha256=FJVhQKk2fj1cO_4rcE_bIF6QmIQZQWUkBnwTNQNMrfE,17857
 dtlpy/entities/driver.py,sha256=O_QdK1EaLjQyQkmvKsmkNgmvmMb1mPjKnJGxK43KrOA,7197
-dtlpy/entities/execution.py,sha256=
+dtlpy/entities/execution.py,sha256=uQe535w9OcAoDiNWf96KcpFzUDEUU-DYsUalv5VziyM,13673
 dtlpy/entities/feature.py,sha256=9fFjD0W57anOVSAVU55ypxN_WTCsWTG03Wkc3cAAj78,3732
 dtlpy/entities/feature_set.py,sha256=niw4MkmrDbD_LWQu1X30uE6U4DCzmFhPTaYeZ6VZDB0,4443
 dtlpy/entities/filters.py,sha256=PUmgor77m3CWeUgvCdWMg3Bt5SxHXPVBbN5VmD_dglQ,22683
 dtlpy/entities/gis_item.py,sha256=Uk-wMBxwcHsImjz4qOjP-EyZAohbRzN43kMpCaVjCXU,3982
 dtlpy/entities/integration.py,sha256=Kdy1j6-cJLW8qNmnqCmdg36phi843YDrlMqcMyMfvYk,5875
-dtlpy/entities/item.py,sha256=
+dtlpy/entities/item.py,sha256=n_zJYr_QlUSBVdYu6oqgXf0JfIFRW1LBcIMMpu4vP38,34074
 dtlpy/entities/label.py,sha256=ycDYavIgKhz806plIX-64c07_TeHpDa-V7LnfFVe4Rg,3869
 dtlpy/entities/links.py,sha256=FAmEwHtsrqKet3c0UHH9u_gHgG6_OwF1-rl4xK7guME,2516
 dtlpy/entities/message.py,sha256=ApJuaKEqxATpXjNYUjGdYPu3ibQzEMo8-LtJ_4xAcPI,5865
 dtlpy/entities/model.py,sha256=YwjIi3MxAZoyartTvqx_qhtDKQe6zVsQuwZbYLygMxU,26898
 dtlpy/entities/node.py,sha256=yPPYDLtNMc6vZbbf4FIffY86y7tkaTvYm42Jb7k3Ofk,39617
-dtlpy/entities/ontology.py,sha256=
+dtlpy/entities/ontology.py,sha256=924g9c2ZTfr69fWd_ejrVU0C-MAUR8UAhhz6GY-IQME,32100
 dtlpy/entities/organization.py,sha256=Zm-tTHV82PvYyTNetRRXqvmvzBCbXEwS-gAENf7Zny4,9874
 dtlpy/entities/package.py,sha256=QSDePHlp4ik19aUE3dAUC7edh0oUUVjzSmMG867avc4,26363
 dtlpy/entities/package_defaults.py,sha256=wTD7Z7rGYjVy8AcUxTFEnkOkviiJaLVZYvduiUBKNZo,211
@@ -96,7 +96,7 @@ dtlpy/entities/trigger.py,sha256=Spf5G3n1PsD3mDntwbAsc-DpEGDlqKgU9ec0Q0HinsQ,142
 dtlpy/entities/user.py,sha256=hqEzwN6rl1oUTpKOV5eXvw9Z7dtpsiC4TAPSNBmkqcM,3865
 dtlpy/entities/webhook.py,sha256=6R06MgLxabvKySInGlSJmaf0AVmAMe3vKusWhqONRyU,3539
 dtlpy/entities/annotation_definitions/__init__.py,sha256=qZ77hGmCQopPSpiDHYhNWbNKC7nrn10NWNlim9dINmg,666
-dtlpy/entities/annotation_definitions/base_annotation_definition.py,sha256=
+dtlpy/entities/annotation_definitions/base_annotation_definition.py,sha256=tZGMokakJ4HjWAtD1obsgh2pORD66XWcnIT6CZLVMQs,3201
 dtlpy/entities/annotation_definitions/box.py,sha256=kNT_Ba7QWKBiyt1uPAmYLyBfPsxvIUNLhVe9042WFnM,8622
 dtlpy/entities/annotation_definitions/classification.py,sha256=uqLAAaqNww2ZwR1e4UW22foJtDxoeZXJsv5PTvyt-tA,1559
 dtlpy/entities/annotation_definitions/comparison.py,sha256=cp9HZ32wm7E78tbeoqsfJL5oZ26ojig7Cjn2FJE7mbI,1806
@@ -153,7 +153,7 @@ dtlpy/ml/metrics.py,sha256=BG2E-1Mvjv2e2No9mIJKVmvzqBvLqytKcw3hA7wVUNc,20037
 dtlpy/ml/predictions_utils.py,sha256=He_84U14oS2Ss7T_-Zj5GDiBZwS-GjMPURUh7u7DjF8,12484
 dtlpy/ml/summary_writer.py,sha256=dehDi8zmGC1sAGyy_3cpSWGXoGQSiQd7bL_Thoo8yIs,2784
 dtlpy/ml/train_utils.py,sha256=R-BHKRfqDoLLhFyLzsRFyJ4E-8iedj9s9oZqy3IO2rg,2404
-dtlpy/repositories/__init__.py,sha256=
+dtlpy/repositories/__init__.py,sha256=b7jPmE4meKaeikO-x87HcO2lcfQg-8OzqcYZa8n6l-Q,2033
 dtlpy/repositories/analytics.py,sha256=dQPCYTPAIuyfVI_ppR49W7_GBj0033feIm9Gd7LW1V0,2966
 dtlpy/repositories/annotations.py,sha256=b6Y9K9Yj_EaavMMrdtDG0QfhsLpz0lYpwMecTaNPmG4,42453
 dtlpy/repositories/apps.py,sha256=J-PDCPWVtvTLmzzkABs2-8zo9hGLk_z_sNR2JB1mB0c,15752
@@ -161,18 +161,19 @@ dtlpy/repositories/artifacts.py,sha256=Ke2ustTNw-1eQ0onLsWY7gL2aChjXPAX5p1uQ_EzM
 dtlpy/repositories/assignments.py,sha256=1VwJZ7ctQe1iaDDDpeYDgoj2G-TCgzolVLUEqUocd2w,25506
 dtlpy/repositories/bots.py,sha256=q1SqH01JHloljKxknhHU09psV1vQx9lPhu3g8mBBeRg,8104
 dtlpy/repositories/codebases.py,sha256=pvcZxdrq0-zWysVbdXjUOhnfcF6hJD8v5VclNZ-zhGA,24668
+dtlpy/repositories/collections.py,sha256=C_BPMg128Sl9AG3U4PxgI_2aaehQ2NuehMmzoTaXbPQ,11459
 dtlpy/repositories/commands.py,sha256=i6gQgOmRDG8ixqKU7672H3CvGt8VLT3ihDVfri1eWWc,5610
 dtlpy/repositories/compositions.py,sha256=H417BvlQAiWr5NH2eANFke6CfEO5o7DSvapYpf7v5Hk,2150
-dtlpy/repositories/computes.py,sha256=
+dtlpy/repositories/computes.py,sha256=l0-FS3_8WEGG5tbtIR3ltsZc6MyHVkiYajHTCaeUugk,10156
 dtlpy/repositories/datasets.py,sha256=SpG86uToq-E5nVHMwHgWx6VwwwkgfYo8x5vZ0WA3Ouw,56546
-dtlpy/repositories/downloader.py,sha256=
+dtlpy/repositories/downloader.py,sha256=CiT8KIjJ8l52Ng003f2_bmolIpe64fi8A_GGEl39M1Y,44254
 dtlpy/repositories/dpks.py,sha256=dglvaiSFBvEithhlQ0RAXwzTxoZaICONs-owx3e2nfU,17848
 dtlpy/repositories/drivers.py,sha256=fF0UuHCyBzop8pHfryex23mf0kVFAkqzNdOmwBbaWxY,10204
 dtlpy/repositories/executions.py,sha256=4UoU6bnB3kl5cMuF1eJvDecfZCaB06gKWxPfv6_g1_k,32598
 dtlpy/repositories/feature_sets.py,sha256=UowMDAl_CRefRB5oZzubnsjU_OFgiPPdQXn8q2j4Kuw,9666
 dtlpy/repositories/features.py,sha256=A_RqTJxzjTh-Wbm0uXaoTNyHSfCLbeiH38iB11p2ifY,9915
-dtlpy/repositories/integrations.py,sha256=
-dtlpy/repositories/items.py,sha256=
+dtlpy/repositories/integrations.py,sha256=sWij_MbxeAlCs3uDRGGKPX__T-h_mVppe4bErkCGIyM,14102
+dtlpy/repositories/items.py,sha256=AF8h7-Yje1p16nXyofNLiC92bRVZtZjtHRPvHwbW62w,38423
 dtlpy/repositories/messages.py,sha256=QU0Psckg6CA_Tlw9AVxqa-Ay1fRM4n269sSIJkH9o7E,3066
 dtlpy/repositories/models.py,sha256=IekNMcnuKVaAVTJf2AJv6YvX5qCd9kkSl4ETPMWP4Zc,38213
 dtlpy/repositories/nodes.py,sha256=xXJm_YA0vDUn0dVvaGeq6ORM0vI3YXvfjuylvGRtkxo,3061
@@ -195,7 +196,7 @@ dtlpy/repositories/uploader.py,sha256=5qQbsg701HrL8x0wWCRLPBP_dztqXEb31QfeZnh0SQ
 dtlpy/repositories/webhooks.py,sha256=IIpxOJ-7KeQp1TY9aJZz-FuycSjAoYx0TDk8z86KAK8,9033
 dtlpy/services/__init__.py,sha256=VfVJy2otIrDra6i7Sepjyez2ujiE6171ChQZp-YgxsM,904
 dtlpy/services/aihttp_retry.py,sha256=tgntZsAY0dW9v08rkjX1T5BLNDdDd8svtgn7nH8DSGU,5022
-dtlpy/services/api_client.py,sha256=
+dtlpy/services/api_client.py,sha256=pz4rScAVzJ1B13qXy2N0Oa3sfKDyR8X1UeslbR-rAb0,73173
 dtlpy/services/api_reference.py,sha256=cW-B3eoi9Xs3AwI87_Kr6GV_E6HPoC73aETFaGz3A-0,1515
 dtlpy/services/async_utils.py,sha256=kaYHTPw0Lg8PeJJq8whPyzrBYkzD7offs5hsKRZXJm8,3960
 dtlpy/services/calls_counter.py,sha256=gr0io5rIsO5-7Cgc8neA1vK8kUtYhgFPmDQ2jXtiZZs,1036
@@ -223,19 +224,19 @@ dtlpy/utilities/reports/report.py,sha256=3nEsNnIWmdPEsd21nN8vMMgaZVcPKn9iawKTTeO
 dtlpy/utilities/videos/__init__.py,sha256=SV3w51vfPuGBxaMeNemx6qEMHw_C4lLpWNGXMvdsKSY,734
 dtlpy/utilities/videos/video_player.py,sha256=LCxg0EZ_DeuwcT7U_r7MRC6Q19s0xdFb7x5Gk39PRms,24072
 dtlpy/utilities/videos/videos.py,sha256=Dj916B4TQRIhI7HZVevl3foFrCsPp0eeWwvGbgX3-_A,21875
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
+dtlpy-1.105.6.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
+dtlpy-1.105.6.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
+dtlpy-1.105.6.data/scripts/dlp.py,sha256=tEokRaDINISXnq8yNx_CBw1qM5uwjYiZoJOYGqWB3RU,4267
 tests/assets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/assets/models_flow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/assets/models_flow/failedmain.py,sha256=n8F4eu_u7JPrJ1zedbJPvv9e3lHb3ihoErqrBIcseEc,1847
 tests/assets/models_flow/main.py,sha256=vnDKyVZaae2RFpvwS22Hzi6Dt2LJerH4yQrmKtaT8_g,2123
 tests/assets/models_flow/main_model.py,sha256=Hl_tv7Q6KaRL3yLkpUoLMRqu5-ab1QsUYPL6RPEoamw,2042
 tests/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-tests/features/environment.py,sha256=
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
+tests/features/environment.py,sha256=TMeUzSZkksHqbxNBDLk-LYBMD4G5dMo4ZLZXPwQImVE,18751
+dtlpy-1.105.6.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+dtlpy-1.105.6.dist-info/METADATA,sha256=cB65EU8nW0Ju7hoUi4qtuoZ37dQcofrvBcf1rs0D-ww,3019
+dtlpy-1.105.6.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+dtlpy-1.105.6.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
+dtlpy-1.105.6.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
+dtlpy-1.105.6.dist-info/RECORD,,
tests/features/environment.py
CHANGED
@@ -282,6 +282,11 @@ def after_tag(context, tag):
             use_fixture(restore_json_file, context)
         except Exception:
             logging.exception('Failed to restore json file')
+    elif tag == 'compute_serviceDriver.delete':
+        try:
+            use_fixture(delete_compute_servicedriver, context)
+        except Exception:
+            logging.exception('Failed to delete service')
     elif tag == 'frozen_dataset':
         pass
     elif 'testrail-C' in tag:
@@ -504,7 +509,36 @@ def models_delete(context):
     assert all_deleted
 
 
+def delete_compute_servicedriver(context):
+    if not hasattr(context, 'to_delete_computes_ids') and not hasattr(context, 'to_delete_service_drivers_ids'):
+        return
+
+    all_deleted = True
+    for service_driver_id in context.to_delete_service_drivers_ids:
+        try:
+            context.dl.service_drivers.delete(service_driver_id=service_driver_id)
+        except context.dl.exceptions.NotFound:
+            pass
+        except:
+            all_deleted = False
+            logging.exception('Failed deleting serviceDriver: {}'.format(service_driver_id))
+    assert all_deleted
+
+    all_deleted = True
+    for compute_id in context.to_delete_computes_ids:
+        try:
+            context.dl.computes.delete(compute_id=compute_id)
+        except context.dl.exceptions.NotFound:
+            pass
+        except:
+            all_deleted = False
+            logging.exception('Failed deleting compute: {}'.format(compute_id))
+    assert all_deleted
+
+
 def restore_json_file(context):
+    if not hasattr(context.feature, 'dataloop_feature_project'):
+        return
     if not hasattr(context, 'backup_path') or not hasattr(context, 'original_path'):
         assert False, 'Please make sure to set the original_path and backup_path in the context'
     # Restore the file from the backup
dtlpy/assets/__pycache__/__init__.cpython-310.pyc
Binary file
{dtlpy-1.103.12.data → dtlpy-1.105.6.data}/scripts/dlp
File without changes
{dtlpy-1.103.12.data → dtlpy-1.105.6.data}/scripts/dlp.bat
File without changes
{dtlpy-1.103.12.data → dtlpy-1.105.6.data}/scripts/dlp.py
File without changes
{dtlpy-1.103.12.dist-info → dtlpy-1.105.6.dist-info}/LICENSE
File without changes
{dtlpy-1.103.12.dist-info → dtlpy-1.105.6.dist-info}/WHEEL
File without changes
{dtlpy-1.103.12.dist-info → dtlpy-1.105.6.dist-info}/entry_points.txt
File without changes
{dtlpy-1.103.12.dist-info → dtlpy-1.105.6.dist-info}/top_level.txt
File without changes