dtlpy 1.102.14__py3-none-any.whl → 1.104.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. dtlpy/__init__.py +1 -0
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/entities/__init__.py +1 -0
  4. dtlpy/entities/annotation.py +17 -3
  5. dtlpy/entities/annotation_definitions/base_annotation_definition.py +13 -4
  6. dtlpy/entities/collection.py +39 -0
  7. dtlpy/entities/command.py +10 -5
  8. dtlpy/entities/compute.py +59 -6
  9. dtlpy/entities/dataset.py +9 -5
  10. dtlpy/entities/dpk.py +9 -9
  11. dtlpy/entities/execution.py +6 -0
  12. dtlpy/entities/filters.py +2 -2
  13. dtlpy/entities/integration.py +0 -1
  14. dtlpy/entities/item.py +56 -2
  15. dtlpy/entities/organization.py +5 -5
  16. dtlpy/ml/base_model_adapter.py +8 -8
  17. dtlpy/repositories/__init__.py +1 -0
  18. dtlpy/repositories/collections.py +296 -0
  19. dtlpy/repositories/downloader.py +2 -0
  20. dtlpy/repositories/features.py +9 -5
  21. dtlpy/repositories/integrations.py +52 -1
  22. dtlpy/repositories/items.py +10 -3
  23. dtlpy/repositories/pipelines.py +3 -9
  24. dtlpy/repositories/uploader.py +16 -4
  25. dtlpy/services/api_client.py +2 -2
  26. {dtlpy-1.102.14.dist-info → dtlpy-1.104.14.dist-info}/METADATA +2 -2
  27. {dtlpy-1.102.14.dist-info → dtlpy-1.104.14.dist-info}/RECORD +34 -33
  28. dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
  29. {dtlpy-1.102.14.data → dtlpy-1.104.14.data}/scripts/dlp +0 -0
  30. {dtlpy-1.102.14.data → dtlpy-1.104.14.data}/scripts/dlp.bat +0 -0
  31. {dtlpy-1.102.14.data → dtlpy-1.104.14.data}/scripts/dlp.py +0 -0
  32. {dtlpy-1.102.14.dist-info → dtlpy-1.104.14.dist-info}/LICENSE +0 -0
  33. {dtlpy-1.102.14.dist-info → dtlpy-1.104.14.dist-info}/WHEEL +0 -0
  34. {dtlpy-1.102.14.dist-info → dtlpy-1.104.14.dist-info}/entry_points.txt +0 -0
  35. {dtlpy-1.102.14.dist-info → dtlpy-1.104.14.dist-info}/top_level.txt +0 -0
dtlpy/__init__.py CHANGED
@@ -169,6 +169,7 @@ messages = repositories.Messages(client_api=client_api)
  compositions = repositories.Compositions(client_api=client_api)
  computes = repositories.Computes(client_api=client_api)
  service_drivers = repositories.ServiceDrivers(client_api=client_api)
+ collections = repositories.Collections(client_api=client_api)

  try:
  check_sdk.check(version=__version__, client_api=client_api)
dtlpy/__version__.py CHANGED
@@ -1 +1 @@
- version = '1.102.14'
+ version = '1.104.14'
dtlpy/entities/__init__.py CHANGED
@@ -79,3 +79,4 @@ from .compute import ClusterProvider, ComputeType, ComputeStatus, Toleration, De
  NodePool, AuthenticationIntegration, Authentication, ComputeCluster, ComputeContext, Compute, KubernetesCompute, \
  ServiceDriver
  from .gis_item import ItemGis, Layer
+ from .collection import Collection
dtlpy/entities/annotation.py CHANGED
@@ -7,6 +7,7 @@ import copy
  import attr
  import json
  import os
+ import warnings

  from PIL import Image
  from enum import Enum
@@ -421,7 +422,7 @@ class Annotation(entities.BaseEntity):

  @property
  def attributes(self):
- if self._recipe_2_attributes or not self.annotation_definition.attributes:
+ if self._recipe_2_attributes is not None or self.annotation_definition.attributes == []:
  return self._recipe_2_attributes
  return self.annotation_definition.attributes

@@ -430,6 +431,11 @@ class Annotation(entities.BaseEntity):
  if isinstance(attributes, dict):
  self._recipe_2_attributes = attributes
  elif isinstance(attributes, list):
+ warnings.warn("List attributes are deprecated and will be removed in version 1.109. Use Attribute 2.0 (Dictionary) instead. "
+ "For more details, refer to the documentation: "
+ "https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_annotations/chapter/#set-attributes-on-annotations",
+ DeprecationWarning,
+ )
  self.annotation_definition.attributes = attributes
  elif attributes is None:
  if self._recipe_2_attributes:
@@ -1095,7 +1101,7 @@ class Annotation(entities.BaseEntity):
  if annotation_definition:
  res.annotation_definition = annotation_definition

- if annotation_definition and annotation_definition.attributes:
+ if annotation_definition and annotation_definition.attributes is not None:
  res.attributes = annotation_definition.attributes

  return res
@@ -1679,7 +1685,7 @@ class FrameAnnotation(entities.BaseEntity):

  @property
  def attributes(self):
- if self._recipe_2_attributes or not self.annotation_definition.attributes:
+ if self._recipe_2_attributes or self.annotation_definition.attributes == []:
  return self._recipe_2_attributes
  return self.annotation_definition.attributes

@@ -1688,6 +1694,11 @@ class FrameAnnotation(entities.BaseEntity):
  if isinstance(attributes, dict):
  self._recipe_2_attributes = attributes
  elif isinstance(attributes, list):
+ warnings.warn("List attributes are deprecated and will be removed in version 1.109. Use Attribute 2.0 (Dictionary) instead. "
+ "For more details, refer to the documentation: "
+ "https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_annotations/chapter/#set-attributes-on-annotations",
+ DeprecationWarning,
+ )
  self.annotation_definition.attributes = attributes
  else:
  raise ValueError('Attributes must be a dictionary or a list')
@@ -1873,6 +1884,9 @@ class FrameAnnotation(entities.BaseEntity):
  'data': self.coordinates
  }

+ if self.annotation_definition.description is not None:
+ snapshot_dict['description'] = self.annotation_definition.description
+
  if self.annotation._recipe_2_attributes:
  snapshot_dict['namedAttributes'] = self._recipe_2_attributes
  else:
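The attribute setters above now warn on list input. A minimal usage sketch of the dictionary-based (Attribute 2.0) form the warning points to; the item ID, label, and attribute keys ('car', 'color', 'occluded') are placeholders and assume a recipe that defines them:

import dtlpy as dl

item = dl.items.get(item_id='<item-id>')
builder = item.annotations.builder()

# Deprecated as of this release: passing a list still works but now emits a
# DeprecationWarning (removal planned for 1.109).
builder.add(annotation_definition=dl.Box(top=10, left=10, bottom=100, right=100,
                                         label='car', attributes=['parked']))

# Attribute 2.0: a dictionary keyed by the attribute keys defined in the recipe.
builder[0].attributes = {'color': 'red', 'occluded': True}

item.annotations.upload(builder)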
dtlpy/entities/annotation_definitions/base_annotation_definition.py CHANGED
@@ -1,5 +1,6 @@
  import logging
  import numpy as np
+ import warnings

  logger = logging.getLogger(name='dtlpy')

@@ -13,8 +14,12 @@ class BaseAnnotationDefinition:
  self._right = 0
  self._annotation = None

- if attributes is None:
- attributes = list()
+ if isinstance(attributes, list) and len(attributes) > 0:
+ warnings.warn("List attributes are deprecated and will be removed in version 1.109. Use Attribute 2.0 (Dictionary) instead."
+ "For more details, refer to the documentation: "
+ "https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_annotations/chapter/#set-attributes-on-annotations",
+ DeprecationWarning,
+ )
  self._attributes = attributes

  @property
@@ -23,8 +28,12 @@ class BaseAnnotationDefinition:

  @attributes.setter
  def attributes(self, v):
- if not isinstance(v, list):
- raise ValueError('Failed to update annotation attributes. Please use annotation.attrubeute to set the required values')
+ if isinstance(v, list):
+ warnings.warn("List attributes are deprecated and will be removed in version 1.109. Use Attribute 2.0 (Dictionary) instead. "
+ "For more details, refer to the documentation: "
+ "https://developers.dataloop.ai/tutorials/data_management/upload_and_manage_annotations/chapter/#set-attributes-on-annotations",
+ DeprecationWarning,
+ )
  self._attributes = v
  @property
  def top(self):
dtlpy/entities/collection.py ADDED
@@ -0,0 +1,39 @@
+ from .. import entities
+ from ..services.api_client import ApiClient
+ import attr
+
+ @attr.s
+ class Collection(entities.BaseEntity):
+ """
+ Represents a collection in the dataset.
+ """
+
+ # sdk
+ _client_api = attr.ib(type=ApiClient, repr=False)
+
+ key = attr.ib(type=str)
+ name = attr.ib(type=str)
+
+ @classmethod
+ def from_json(cls, _json, client_api, is_fetched=True):
+ """
+ Create a single Collection entity from the dataset JSON.
+
+ :param _json: A dictionary containing collection data in the format:
+ { "metadata.system.collections.c0": {"name": "Justice League"} }
+ :param client_api: The client API instance.
+ :param is_fetched: Whether the entity was fetched from the platform.
+ :return: A single Collection entity.
+ """
+ full_key, value = next(iter(_json.items()))
+ # Strip the prefix
+ key = full_key.replace("metadata.system.collections.", "")
+ name = value.get("name")
+
+ inst = cls(
+ key=key,
+ name=name,
+ client_api=client_api,
+ )
+ inst.is_fetched = is_fetched
+ return inst
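A small sketch of how the new entity parses the metadata format documented in its docstring; dl.client_api stands in here for the SDK's module-level authenticated ApiClient:

import dtlpy as dl
from dtlpy.entities.collection import Collection

raw = {"metadata.system.collections.c0": {"name": "Justice League"}}
collection = Collection.from_json(_json=raw, client_api=dl.client_api)

print(collection.key)   # 'c0'  (the "metadata.system.collections." prefix is stripped)
print(collection.name)  # 'Justice League'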
dtlpy/entities/command.py CHANGED
@@ -18,6 +18,8 @@ class CommandsStatus(str, Enum):
  SUCCESS = 'success'
  FAILED = 'failed'
  TIMEOUT = 'timeout'
+ CLEANING_UP = 'cleaning-up'
+ ON_ERROR = 'on-error'


  @attr.s
@@ -135,11 +137,14 @@ class Command(entities.BaseEntity):
  :return: True if command still in progress
  :rtype: bool
  """
- return self.status in [entities.CommandsStatus.CREATED,
- entities.CommandsStatus.MAKING_CHILDREN,
- entities.CommandsStatus.WAITING_CHILDREN,
- entities.CommandsStatus.FINALIZING,
- entities.CommandsStatus.IN_PROGRESS]
+ if self.status not in {status for status in entities.CommandsStatus}:
+ raise ValueError('status is not a valid CommandsStatus')
+ return self.status not in [entities.CommandsStatus.SUCCESS,
+ entities.CommandsStatus.FAILED,
+ entities.CommandsStatus.TIMEOUT,
+ entities.CommandsStatus.CANCELED,
+ entities.CommandsStatus.ABORTED
+ ]

  def wait(self, timeout=0, step=None, backoff_factor=1):
  """
dtlpy/entities/compute.py CHANGED
@@ -230,10 +230,11 @@ class ComputeCluster:
  devops_output['config']['kubernetesVersion'],
  ClusterProvider(devops_output['config']['provider']),
  node_pools,
- {},
- Authentication(AuthenticationIntegration(integration.id,integration.type))
+ {},
+ Authentication(AuthenticationIntegration(integration.id, integration.type))
  )

+
  class ComputeContext:
  def __init__(self, labels: List[str], org: str, project: Optional[str] = None):
  self.labels = labels
@@ -376,11 +377,35 @@ class KubernetesCompute(Compute):


  class ServiceDriver:
- def __init__(self, name: str, context: ComputeContext, compute_id: str, client_api: ApiClient):
+ def __init__(
+ self,
+ name: str,
+ context: ComputeContext,
+ compute_id: str,
+ client_api: ApiClient,
+ type: ComputeType = None,
+ created_at: str = None,
+ updated_at: str = None,
+ namespace: str = None,
+ metadata: Dict = None,
+ url: str = None,
+ archived: bool = None,
+ id: str = None,
+ is_cache_available: bool = None
+ ):
  self.name = name
  self.context = context
  self.compute_id = compute_id
  self.client_api = client_api
+ self.type = type or ComputeType.KUBERNETES
+ self.created_at = created_at
+ self.updated_at = updated_at
+ self.namespace = namespace
+ self.metadata = metadata
+ self.url = url
+ self.archived = archived
+ self.id = id
+ self.is_cache_available = is_cache_available

  @classmethod
  def from_json(cls, _json, client_api: ApiClient):
@@ -388,12 +413,40 @@ class ServiceDriver:
  name=_json.get('name'),
  context=ComputeContext.from_json(_json.get('context', dict())),
  compute_id=_json.get('computeId'),
- client_api=client_api
+ client_api=client_api,
+ type=_json.get('type', None),
+ created_at=_json.get('createdAt', None),
+ updated_at=_json.get('updatedAt', None),
+ namespace=_json.get('namespace', None),
+ metadata=_json.get('metadata', None),
+ url=_json.get('url', None),
+ archived=_json.get('archived', None),
+ id=_json.get('id', None),
+ is_cache_available=_json.get('isCacheAvailable', None)
  )

  def to_json(self):
- return {
+ _json = {
  'name': self.name,
  'context': self.context.to_json(),
- 'computeId': self.compute_id
+ 'computeId': self.compute_id,
+ 'type': self.type,
  }
+ if self.created_at is not None:
+ _json['createdAt'] = self.namespace
+ if self.updated_at is not None:
+ _json['updatedAt'] = self.updated_at
+ if self.namespace is not None:
+ _json['namespace'] = self.namespace
+ if self.metadata is not None:
+ _json['metadata'] = self.metadata
+ if self.url is not None:
+ _json['url'] = self.url
+ if self.archived is not None:
+ _json['archived'] = self.archived
+ if self.id is not None:
+ _json['id'] = self.id
+ if self.is_cache_available is not None:
+ _json['isCacheAvailable'] = self.is_cache_available
+
+ return _json
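ServiceDriver now carries several optional platform fields and serializes them only when set. A minimal sketch of building one from a payload; all values are placeholders, and the 'context' keys ('labels', 'org') are an assumption about ComputeContext.from_json — only the top-level keys come from the from_json hunk above:

from dtlpy.entities.compute import ServiceDriver
from dtlpy.services.api_client import client as client_api

payload = {
    'name': 'my-driver',                          # placeholder values
    'context': {'labels': [], 'org': '<org-id>'},  # assumed ComputeContext keys
    'computeId': '<compute-id>',
    'namespace': 'dataloop',
    'isCacheAvailable': True,
}
driver = ServiceDriver.from_json(payload, client_api=client_api)

print(driver.namespace, driver.is_cache_available)  # 'dataloop' True
print(driver.to_json()['type'])                     # defaults to ComputeType.KUBERNETES when not provided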
dtlpy/entities/dataset.py CHANGED
@@ -284,13 +284,11 @@ class Dataset(entities.BaseEntity):
  def set_repositories(self):
  reps = namedtuple('repositories',
  field_names=['items', 'recipes', 'datasets', 'assignments', 'tasks', 'annotations',
- 'ontologies', 'features', 'settings', 'schema'])
+ 'ontologies', 'features', 'settings', 'schema', 'collections'])
  if self._project is None:
  datasets = repositories.Datasets(client_api=self._client_api, project=self._project)
- features = repositories.Features(client_api=self._client_api, project=self._project)
  else:
  datasets = self._project.datasets
- features = self._project.features

  return reps(
  items=repositories.Items(client_api=self._client_api, dataset=self, datasets=datasets),
@@ -300,9 +298,10 @@ class Dataset(entities.BaseEntity):
  annotations=repositories.Annotations(client_api=self._client_api, dataset=self),
  datasets=datasets,
  ontologies=repositories.Ontologies(client_api=self._client_api, dataset=self),
- features=features,
+ features=repositories.Features(client_api=self._client_api, project=self._project, dataset=self),
  settings=repositories.Settings(client_api=self._client_api, dataset=self),
- schema=repositories.Schema(client_api=self._client_api, dataset=self)
+ schema=repositories.Schema(client_api=self._client_api, dataset=self),
+ collections=repositories.Collections(client_api=self._client_api, dataset=self)
  )

  @property
@@ -350,6 +349,11 @@ class Dataset(entities.BaseEntity):
  assert isinstance(self._repositories.features, repositories.Features)
  return self._repositories.features

+ @property
+ def collections(self):
+ assert isinstance(self._repositories.collections, repositories.Collections)
+ return self._repositories.collections
+
  @property
  def schema(self):
  assert isinstance(self._repositories.schema, repositories.Schema)
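With the repository wired into set_repositories(), collections become reachable from a dataset. A hedged sketch; the assign signature is taken from the Item.assign_collection wrapper later in this diff, the full API of the new Collections repository (dtlpy/repositories/collections.py) is not shown here, and the IDs and collection name are placeholders:

import dtlpy as dl

dataset = dl.datasets.get(dataset_id='<dataset-id>')

# The dataset-scoped Collections repository created in set_repositories()
collections = dataset.collections

collections.assign(dataset_id=dataset.id,
                   collections=['my-collection'],   # placeholder collection name
                   item_id='<item-id>')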
dtlpy/entities/dpk.py CHANGED
@@ -31,8 +31,8 @@ DEFAULT_STOPS = {SlotType.ITEM_VIEWER: {"type": "itemViewer",


  class Slot(entities.DlEntity):
- type = entities.DlProperty(location=['type'], _type=str)
- configuration = entities.DlProperty(location=['configuration'], _type=dict)
+ type: str = entities.DlProperty(location=['type'], _type=str)
+ configuration: dict = entities.DlProperty(location=['configuration'], _type=dict)

  def to_json(self) -> dict:
  return self._dict.copy()
@@ -59,10 +59,10 @@ class Toolbar(entities.DlEntity):


  class Panel(entities.DlEntity):
- name = entities.DlProperty(location=['name'], _type=str)
- path = entities.DlProperty(location=['path'], _type=str, default=None)
- min_role = entities.DlProperty(location=['minRole'], _type=list)
- supported_slots = entities.DlProperty(location=['supportedSlots'], _type=list)
+ name: str = entities.DlProperty(location=['name'], _type=str)
+ path: str = entities.DlProperty(location=['path'], _type=str, default=None)
+ min_role: list = entities.DlProperty(location=['minRole'], _type=list)
+ supported_slots: list = entities.DlProperty(location=['supportedSlots'], _type=list)

  metadata = entities.DlProperty(location=['metadata'], _type=list)
  default_settings = entities.DlProperty(location=['defaultSettings'], _type=list)
@@ -233,15 +233,15 @@ class Dpk(entities.DlEntity):
  base_id: str = entities.DlProperty(location=['baseId'], _type=str)
  name: str = entities.DlProperty(location=['name'], _type=str)
  version: str = entities.DlProperty(location=['version'], _type=str)
- attributes: list = entities.DlProperty(location=['attributes'], _type=dict)
+ attributes: dict = entities.DlProperty(location=['attributes'], _type=dict)
  created_at: str = entities.DlProperty(location=['createdAt'], _type=str)
  updated_at: str = entities.DlProperty(location=['updatedAt'], _type=str)
  creator: str = entities.DlProperty(location=['creator'], _type=str)
  display_name: str = entities.DlProperty(location=['displayName'], _type=str)
  icon: str = entities.DlProperty(location=['icon'], _type=str)
  tags: list = entities.DlProperty(location=['tags'], _type=list)
- codebase: str = entities.DlProperty(location=['codebase'], _kls="Codebase")
- scope: dict = entities.DlProperty(location=['scope'], _type=str)
+ codebase: Union[entities.Codebase, None] = entities.DlProperty(location=['codebase'], _kls="Codebase")
+ scope: str = entities.DlProperty(location=['scope'], _type=str)
  context: dict = entities.DlProperty(location=['context'], _type=dict)
  metadata: dict = entities.DlProperty(location=['metadata'], _type=dict)
  dependencies: dict = entities.DlProperty(location=['dependencies'], _type=List[dict])
dtlpy/entities/execution.py CHANGED
@@ -63,6 +63,8 @@ class Execution(entities.BaseEntity):
  # optional
  pipeline = attr.ib(type=dict, default=None, repr=False)
  model = attr.ib(type=dict, default=None, repr=False)
+ app = attr.ib(default=None)
+ driver_id = attr.ib(default=None)

  ################
  # repositories #
@@ -189,6 +191,8 @@ class Execution(entities.BaseEntity):
  pipeline=_json.get('pipeline', None),
  model=_json.get('model', None),
  package_revision=_json.get('packageRevision', None),
+ app=_json.get('app', None),
+ driver_id=_json.get('driverId', None)
  )
  inst.is_fetched = is_fetched
  return inst
@@ -226,6 +230,7 @@ class Execution(entities.BaseEntity):
  attr.fields(Execution).pipeline,
  attr.fields(Execution).model,
  attr.fields(Execution).package_revision,
+ attr.fields(Execution).driver_id,
  )
  )

@@ -247,6 +252,7 @@ class Execution(entities.BaseEntity):
  _json['feedbackQueue'] = self.feedback_queue
  _json['syncReplyTo '] = self.sync_reply_to
  _json['packageRevision'] = self.package_revision
+ _json['driverId'] = self.driver_id
  if self.pipeline:
  _json['pipeline'] = self.pipeline
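Execution now parses two extra optional fields from the platform payload. A small sketch of reading them, with a placeholder execution ID; both may simply be None on older executions:

import dtlpy as dl

execution = dl.executions.get(execution_id='<execution-id>')

print(execution.app)        # populated from 'app' in the response, None if absent
print(execution.driver_id)  # populated from 'driverId', and serialized back by to_json()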
dtlpy/entities/filters.py CHANGED
@@ -497,8 +497,8 @@ class Filters:
  _json["resource"] = f'{_json["resource"]}'
  # convert the dictionary to a json string
  _json['dqlFilter'] = json.dumps({'filter': _json.pop('filter'),
- 'join': _json.pop('join'),
- 'sort': _json.get('sort')})
+ 'join': _json.pop('join', None),
+ 'sort': _json.get('sort', None)})
  # set the page size as the UI default
  _json['pageSize'] = 100
  _json['page'] = _json['page']
dtlpy/entities/integration.py CHANGED
@@ -1,5 +1,4 @@
  from enum import Enum
-
  import logging
  import attr

dtlpy/entities/item.py CHANGED
@@ -12,6 +12,7 @@ from .annotation import ViewAnnotationOptions, ExportVersion
  from ..services.api_client import ApiClient
  from ..services.api_client import client as client_api
  import json
+ from typing import List
  import requests

  logger = logging.getLogger(name='dtlpy')
@@ -223,7 +224,7 @@ class Item(entities.BaseEntity):
  def set_repositories(self):
  reps = namedtuple('repositories',
  field_names=['annotations', 'datasets', 'items', 'codebases', 'artifacts', 'modalities',
- 'features', 'assignments', 'tasks', 'resource_executions'])
+ 'features', 'assignments', 'tasks', 'resource_executions', 'collections'])
  reps.__new__.__defaults__ = (None, None, None, None, None, None, None, None, None)

  if self._dataset is None:
@@ -270,7 +271,8 @@ class Item(entities.BaseEntity):
  client_api=self._client_api,
  project=self._project,
  resource=self
- )
+ ),
+ collections=repositories.Collections(client_api=self._client_api, item=self, dataset=self._dataset)
  )
  return r

@@ -313,6 +315,11 @@ class Item(entities.BaseEntity):
  def features(self):
  assert isinstance(self._repositories.features, repositories.Features)
  return self._repositories.features
+
+ @property
+ def collections(self):
+ assert isinstance(self._repositories.collections, repositories.Collections)
+ return self._repositories.collections

  ##############
  # Properties #
@@ -770,6 +777,53 @@ class Item(entities.BaseEntity):
  if tags.get(subset) is True:
  return subset
  return None
+
+ def assign_collection(self, collections: List[str]) -> bool:
+ """
+ Assign this item to one or more collections.
+
+ :param collections: List of collection names to assign the item to.
+ :return: True if the assignment was successful, otherwise False.
+ """
+ return self.collections.assign(dataset_id=self.dataset_id, collections=collections, item_id=self.id,)
+
+ def unassign_collection(self, collections: List[str]) -> bool:
+ """
+ Unassign this item from one or more collections.
+
+ :param collections: List of collection names to unassign the item from.
+ :return: True if the unassignment was successful, otherwise False.
+ """
+ return self.collections.unassign(dataset_id=self.dataset_id, item_id=self.id, collections=collections)
+
+ def list_collections(self) -> List[dict]:
+ """
+ List all collections associated with this item.
+
+ :return: A list of dictionaries containing collection keys and their respective names.
+ Each dictionary has the structure: {"key": <collection_key>, "name": <collection_name>}.
+ """
+ collections = self.metadata.get("system", {}).get("collections", {})
+ if not isinstance(collections, dict):
+ # Ensure collections is a dictionary
+ return []
+
+ # Retrieve collection names by their keys
+ return [
+ {"key": key, "name": self.collections.get_name_by_key(key)}
+ for key in collections.keys()
+ ]
+
+ def list_missing_collections(self) -> List[str]:
+ """
+ List all items in the dataset that are not assigned to any collection.
+
+ :return: A list of item IDs that are not part of any collection.
+ """
+ filters = entities.Filters()
+ filters.add(field='metadata.system.collections', values=None)
+ filters.add(field='datasetId', values=self._dataset.id)
+ return self._dataset.items.list(filters=filters)

  class ModalityTypeEnum(str, Enum):
  """
dtlpy/entities/organization.py CHANGED
@@ -49,7 +49,7 @@ class Organization(entities.BaseEntity):
  logo_url = attr.ib(repr=False)
  plan = attr.ib(repr=False)
  owner = attr.ib(repr=False)
- created_by = attr.ib(repr=False)
+ creator = attr.ib(repr=False)

  # api
  _client_api = attr.ib(type=ApiClient, repr=False)
@@ -67,7 +67,7 @@ class Organization(entities.BaseEntity):

  @property
  def createdBy(self):
- return self.created_by
+ return self.creator

  @_repositories.default
  def set_repositories(self):
@@ -158,7 +158,7 @@ class Organization(entities.BaseEntity):
  logo_url=_json.get('logoUrl', None),
  plan=_json.get('plan', None),
  owner=_json.get('owner', None),
- created_by=_json.get('createdBy', None),
+ creator=_json.get('creator', None),
  client_api=client_api)
  inst.is_fetched = is_fetched
  return inst
@@ -175,7 +175,7 @@ class Organization(entities.BaseEntity):
  attr.fields(Organization)._repositories,
  attr.fields(Organization).created_at,
  attr.fields(Organization).updated_at,
- attr.fields(Organization).created_by,
+ attr.fields(Organization).creator,
  ))
  output_dict['members'] = self.members
  output_dict['groups'] = self.groups
@@ -188,7 +188,7 @@ class Organization(entities.BaseEntity):
  output_dict['logo_url'] = self.logo_url
  output_dict['plan'] = self.plan
  output_dict['owner'] = self.owner
- output_dict['createdBy'] = self.created_by
+ output_dict['creator'] = self.creator

  return output_dict

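The rename is backward compatible on the accessor side: the createdBy property is kept and now returns the creator field parsed from the platform's 'creator' key. A tiny sketch, with a placeholder organization ID:

import dtlpy as dl

org = dl.organizations.get(organization_id='<organization-id>')

print(org.creator)     # populated from the 'creator' field in the response
print(org.createdBy)   # legacy accessor, now an alias returning org.creator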
dtlpy/ml/base_model_adapter.py CHANGED
@@ -488,7 +488,8 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
  try:
  self.model_entity.project.feature_sets.get(feature_set_name=self.model_entity.name)
  feature_set_name = f"{self.model_entity.name}-{''.join(random.choices(string.ascii_letters + string.digits, k=5))}"
- logger.warning(f"Feature set with the model name already exists. Creating new feature set with name {feature_set_name}")
+ logger.warning(
+ f"Feature set with the model name already exists. Creating new feature set with name {feature_set_name}")
  except exceptions.NotFound:
  feature_set_name = self.model_entity.name
  feature_set = self.model_entity.project.feature_sets.create(name=feature_set_name,
@@ -518,7 +519,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
  self.logger.debug(
  "Uploading items' feature vectors for model {!r}.".format(self.model_entity.name))
  try:
- _ = list(pool.map(partial(self._upload_model_features,
+ list(pool.map(partial(self._upload_model_features,
  feature_set.id,
  self.model_entity.project_id),
  batch_items,
@@ -762,14 +763,13 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
  @staticmethod
  def _upload_model_features(feature_set_id, project_id, item: entities.Item, vector):
  try:
- feature = item.features.create(value=vector,
- project_id=project_id,
- feature_set_id=feature_set_id,
- entity=item)
- return feature
+ if vector is not None:
+ item.features.create(value=vector,
+ project_id=project_id,
+ feature_set_id=feature_set_id,
+ entity=item)
  except Exception as e:
  logger.error(f'Failed to upload feature vector of length {len(vector)} to item {item.id}, Error: {e}')
- return []

  def _upload_model_annotations(self, item: entities.Item, predictions, clean_annotations):
  """
dtlpy/repositories/__init__.py CHANGED
@@ -52,3 +52,4 @@ from .messages import Messages
  from .compositions import Compositions
  from .schema import Schema
  from .computes import Computes, ServiceDrivers
+ from .collections import Collections