dtlpy 1.117.6__py3-none-any.whl → 1.118.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dtlpy/__version__.py CHANGED
@@ -1 +1 @@
- version = '1.117.6'
+ version = '1.118.12'
dtlpy/entities/annotation.py CHANGED
@@ -1827,7 +1827,7 @@ class FrameAnnotation(entities.BaseEntity):
          return frame
 
      @classmethod
-     def from_snapshot(cls, annotation, _json, fps):
+     def from_snapshot(cls, annotation, _json, fps=None):
          """
          new frame state to annotation
 
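With `fps` now optional, a frame can be rebuilt from a snapshot without passing a frame rate. A minimal sketch, assuming `annotation` is an existing video `Annotation` entity, `snapshot_json` is one entry from its snapshots list (both placeholders), and `FrameAnnotation` is exposed at the package level like the other entities:

    import dtlpy as dl

    # fps now defaults to None instead of being a required argument
    frame = dl.FrameAnnotation.from_snapshot(annotation=annotation,
                                             _json=snapshot_json)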
dtlpy/entities/app.py CHANGED
@@ -93,7 +93,7 @@ class App(entities.BaseEntity):
          .. code-block:: python
              succeed = app.uninstall()
          """
-         return self.apps.uninstall(self.id)
+         return self.apps.uninstall(app=self)
 
      def update(self):
          """
dtlpy/entities/compute.py CHANGED
@@ -13,6 +13,7 @@ class ClusterProvider(str, Enum):
      LOCAL = 'local'
      RANCHER_K3S = 'rancher-k3s'
      RANCHER_RKE = 'rancher-rke'
+     OPENSHIFT = 'openshift'
 
 
  class ComputeType(str, Enum):
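Because `ClusterProvider` subclasses `str`, the new member compares equal to its raw value, so string-based configs keep working. A short sketch:

    from dtlpy.entities.compute import ClusterProvider

    provider = ClusterProvider.OPENSHIFT
    assert provider == 'openshift'  # str-enum members equal their raw value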
dtlpy/entities/dataset.py CHANGED
@@ -86,6 +86,9 @@ class Dataset(entities.BaseEntity):
      # api
      _client_api = attr.ib(type=ApiClient, repr=False)
 
+     # syncing status
+     is_syncing = attr.ib(default=False, repr=False)
+
      # entities
      _project = attr.ib(default=None, repr=False)
 
@@ -183,6 +186,7 @@ class Dataset(entities.BaseEntity):
                      expiration_options=expiration_options,
                      index_driver=_json.get('indexDriver', None),
                      enable_sync_with_cloned=_json.get('enableSyncWithCloned', None),
+                     is_syncing=_json.get('isSyncing', False),
                      src_dataset=_json.get('srcDataset', None))
          inst.is_fetched = is_fetched
          return inst
@@ -215,6 +219,7 @@ class Dataset(entities.BaseEntity):
              attr.fields(Dataset).items_count,
              attr.fields(Dataset).index_driver,
              attr.fields(Dataset).enable_sync_with_cloned,
+             attr.fields(Dataset).is_syncing,
              attr.fields(Dataset).src_dataset,
          ))
          _json.update({'items': self.items_url})
@@ -231,6 +236,7 @@ class Dataset(entities.BaseEntity):
              _json['expirationOptions'] = self.expiration_options.to_json()
          if self.enable_sync_with_cloned is not None:
              _json['enableSyncWithCloned'] = self.enable_sync_with_cloned
+         _json['isSyncing'] = self.is_syncing
          if self.src_dataset is not None:
              _json['srcDataset'] = self.src_dataset
          return _json
@@ -288,12 +294,15 @@ class Dataset(entities.BaseEntity):
      def set_repositories(self):
          reps = namedtuple('repositories',
                            field_names=['items', 'recipes', 'datasets', 'assignments', 'tasks', 'annotations',
-                                        'ontologies', 'features', 'settings', 'schema', 'collections'])
+                                        'ontologies', 'features', 'feature_sets', 'settings', 'schema', 'collections'])
+         _project_id = None
          if self._project is None:
              datasets = repositories.Datasets(client_api=self._client_api, project=self._project)
+             if self.projects is not None and len(self.projects) > 0:
+                 _project_id = self.projects[0]
          else:
              datasets = self._project.datasets
-
+             _project_id = self._project.id
          return reps(
              items=repositories.Items(client_api=self._client_api, dataset=self, datasets=datasets),
              recipes=repositories.Recipes(client_api=self._client_api, dataset=self),
@@ -303,6 +312,7 @@ class Dataset(entities.BaseEntity):
              datasets=datasets,
              ontologies=repositories.Ontologies(client_api=self._client_api, dataset=self),
              features=repositories.Features(client_api=self._client_api, project=self._project, dataset=self),
+             feature_sets=repositories.FeatureSets(client_api=self._client_api, project=self._project, project_id=_project_id, dataset=self),
              settings=repositories.Settings(client_api=self._client_api, dataset=self),
              schema=repositories.Schema(client_api=self._client_api, dataset=self),
              collections=repositories.Collections(client_api=self._client_api, dataset=self)
@@ -353,6 +363,11 @@ class Dataset(entities.BaseEntity):
          assert isinstance(self._repositories.features, repositories.Features)
          return self._repositories.features
 
+     @property
+     def feature_sets(self):
+         assert isinstance(self._repositories.feature_sets, repositories.FeatureSets)
+         return self._repositories.feature_sets
+
      @property
      def collections(self):
          assert isinstance(self._repositories.collections, repositories.Collections)
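Datasets now expose a syncing flag and a `feature_sets` repository. A minimal usage sketch, assuming a reachable dataset id (placeholder):

    import dtlpy as dl

    dataset = dl.datasets.get(dataset_id='dataset-id')  # placeholder id

    # parsed from the platform's 'isSyncing' field, defaults to False
    print(dataset.is_syncing)

    # feature sets are now reachable directly from the dataset entity
    feature_sets = dataset.feature_sets.list()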
dtlpy/entities/model.py CHANGED
@@ -423,7 +423,7 @@ class Model(entities.BaseEntity):
          # default
          if 'id_to_label_map' not in self.configuration:
              if not (self.dataset_id == 'null' or self.dataset_id is None):
-                 self.labels = [label.tag for label in self.dataset.labels]
+                 self.labels = [flat_key for flat_key, _ in self.dataset.labels_flat_dict.items()]
              self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in enumerate(self.labels)}
          # use existing
          else:
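The default `id_to_label_map` is now built from `labels_flat_dict` rather than the top-level `labels` tags, so nested ontology labels presumably keep their full flattened key instead of collapsing to their top-level tag. A sketch of the equivalent mapping, assuming `dataset` is the model's dataset:

    # equivalent to the new default label map
    labels = list(dataset.labels_flat_dict.keys())
    id_to_label_map = {idx: lbl for idx, lbl in enumerate(labels)}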
dtlpy/entities/paged_entities.py CHANGED
@@ -9,7 +9,7 @@ from typing import Optional, List, Any
  import attr
 
  from .filters import FiltersOperations, FiltersOrderByDirection, FiltersResource
- from .. import miscellaneous
+ from .. import miscellaneous, exceptions
  from ..services.api_client import ApiClient
 
  logger = logging.getLogger(name='dtlpy')
@@ -243,8 +243,12 @@ class PagedEntities:
          :param page_offset: page offset (for offset-based)
          :param page_size: page size
          """
-         items = self.return_page(page_offset=page_offset, page_size=page_size)
-         self.items = items
+         try:
+             items = self.return_page(page_offset=page_offset, page_size=page_size)
+             self.items = items
+         except exceptions.BadRequest as e:
+             logger.warning(f"BadRequest error received: {str(e)}")
+             self.items = miscellaneous.List(list())
 
      def next_page(self) -> None:
          """
dtlpy/entities/service.py CHANGED
@@ -142,7 +142,7 @@ class KubernetesRuntime(ServiceRuntime):
                   num_replicas=DEFAULT_NUM_REPLICAS,
                   concurrency=DEFAULT_CONCURRENCY,
                   dynamic_concurrency=None,
-                  concurrency_update_method=None,
+                  dynamic_concurrency_config=None,
                   runner_image=None,
                   autoscaler=None,
                   **kwargs):
@@ -156,7 +156,7 @@ class KubernetesRuntime(ServiceRuntime):
          self.single_agent = kwargs.get('singleAgent', None)
          self.preemptible = kwargs.get('preemptible', None)
          self.dynamic_concurrency = kwargs.get('dynamicConcurrency', dynamic_concurrency)
-         self.concurrency_update_method = kwargs.get('concurrencyUpdateMethod', concurrency_update_method)
+         self.dynamic_concurrency_config = kwargs.get('dynamicConcurrencyConfig', dynamic_concurrency_config)
 
          self.autoscaler = kwargs.get('autoscaler', autoscaler)
          if self.autoscaler is not None and isinstance(self.autoscaler, dict):
@@ -191,8 +191,8 @@ class KubernetesRuntime(ServiceRuntime):
          if self.dynamic_concurrency is not None:
              _json['dynamicConcurrency'] = self.dynamic_concurrency
 
-         if self.concurrency_update_method is not None:
-             _json['concurrencyUpdateMethod'] = self.concurrency_update_method
+         if self.dynamic_concurrency_config is not None:
+             _json['dynamicConcurrencyConfig'] = self.dynamic_concurrency_config
 
          return _json
 
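The runtime field was renamed from `concurrency_update_method` to `dynamic_concurrency_config` (serialized as `dynamicConcurrencyConfig`). A minimal construction sketch; the config payload shown is a placeholder, since this diff does not document its schema:

    import dtlpy as dl

    runtime = dl.KubernetesRuntime(
        concurrency=10,
        dynamic_concurrency=True,
        dynamic_concurrency_config={'key': 'value'},  # placeholder payload
    )
    assert 'dynamicConcurrencyConfig' in runtime.to_json()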
dtlpy/ml/base_model_adapter.py CHANGED
@@ -472,31 +472,32 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
          self.logger.debug("Downloading subset {!r} of {}".format(subset, self.model_entity.dataset.name))
 
          annotation_filters = None
-         if subset in annotations_subsets:
-             annotation_filters = entities.Filters(
-                 use_defaults=False,
-                 resource=entities.FiltersResource.ANNOTATION,
-                 custom_filter=annotations_subsets[subset],
-             )
-         # if user provided annotation_filters, skip the default filters
-         elif self.model_entity.output_type is not None and self.model_entity.output_type != "embedding":
-             annotation_filters = entities.Filters(resource=entities.FiltersResource.ANNOTATION, use_defaults=False)
-             if self.model_entity.output_type in [
-                 entities.AnnotationType.SEGMENTATION,
-                 entities.AnnotationType.POLYGON,
-             ]:
-                 model_output_types = [entities.AnnotationType.SEGMENTATION, entities.AnnotationType.POLYGON]
-             else:
-                 model_output_types = [self.model_entity.output_type]
-
-             annotation_filters.add(
-                 field=entities.FiltersKnownFields.TYPE,
-                 values=model_output_types,
-                 operator=entities.FiltersOperations.IN,
-             )
+         if self.model_entity.output_type != "embedding":
+             if subset in annotations_subsets:
+                 annotation_filters = entities.Filters(
+                     use_defaults=False,
+                     resource=entities.FiltersResource.ANNOTATION,
+                     custom_filter=annotations_subsets[subset],
+                 )
+             # if user provided annotation_filters, skip the default filters
+             elif self.model_entity.output_type is not None:
+                 annotation_filters = entities.Filters(resource=entities.FiltersResource.ANNOTATION, use_defaults=False)
+                 if self.model_entity.output_type in [
+                     entities.AnnotationType.SEGMENTATION,
+                     entities.AnnotationType.POLYGON,
+                 ]:
+                     model_output_types = [entities.AnnotationType.SEGMENTATION, entities.AnnotationType.POLYGON]
+                 else:
+                     model_output_types = [self.model_entity.output_type]
+
+                 annotation_filters.add(
+                     field=entities.FiltersKnownFields.TYPE,
+                     values=model_output_types,
+                     operator=entities.FiltersOperations.IN,
+                 )
 
-         annotation_filters = self.__include_model_annotations(annotation_filters)
-         annotations_subsets[subset] = annotation_filters.prepare()
+             annotation_filters = self.__include_model_annotations(annotation_filters)
+             annotations_subsets[subset] = annotation_filters.prepare()
 
          ret_list = self.__download_items(
              dataset=dataset,
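The restructure guards the whole block behind `output_type != "embedding"`, so embedding models now skip annotation filtering entirely, including the `__include_model_annotations` step and the write-back into `annotations_subsets`; previously an embedding model could still enter the first branch when its subset defined a custom annotation filter. For non-embedding models the default filter is unchanged; a sketch of how it is assembled for, say, a box model:

    import dtlpy as dl

    filters = dl.Filters(resource=dl.FiltersResource.ANNOTATION, use_defaults=False)
    filters.add(field=dl.FiltersKnownFields.TYPE,
                values=[dl.AnnotationType.BOX],
                operator=dl.FiltersOperations.IN)
    print(filters.prepare())  # the DQL written into annotations_subsets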
@@ -709,7 +710,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
          valid_vectors = []
          items_to_upload = []
          vectors_to_upload = []
-
+
          for item, vector in zip(_items, vectors):
              # Check if vector is valid
              if vector is None or len(vector) != embeddings_size:
@@ -719,25 +720,25 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
              # Item and vector are valid
              valid_items.append(item)
              valid_vectors.append(vector)
-
+
              # Check if item should be skipped (prompt items)
              _system_metadata = getattr(item, 'system', dict())
              is_prompt = _system_metadata.get('shebang', dict()).get('dltype', '') == 'prompt'
              if skip_default_items and is_prompt:
                  self.logger.debug(f"Skipping feature upload for prompt item {item.id}")
                  continue
-
+
              # Items were not skipped - should be uploaded
              items_to_upload.append(item)
              vectors_to_upload.append(vector)
-
+
          # Update the original lists with valid items only
          _items[:] = valid_items
          vectors[:] = valid_vectors
-
+
          if len(_items) != len(vectors):
              raise ValueError(f"The number of items ({len(_items)}) is not equal to the number of vectors ({len(vectors)}).")
-
+
          self.logger.debug(f"Uploading {len(items_to_upload)} items' feature vectors for model {self.model_entity.name}.")
          try:
              start_time = time.time()
@@ -830,7 +831,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
          logger.info("Received {s} for training".format(s=model.id))
          model = model.wait_for_model_ready()
          if model.status == 'failed':
-             raise ValueError("Model is in failed state, cannot train.")
+             logger.warning("Model failed. New training will attempt to resume from previous checkpoints.")
 
          ##############
          # Set status #
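The first two hunks are whitespace-only changes to blank lines. The last one changes behavior: training no longer aborts when the model is in a failed state; the adapter logs a warning and proceeds, attempting to resume from previous checkpoints. A hedged caller-side sketch, assuming `model` is a `Model` entity whose last training failed:

    # before: raised ValueError('Model is in failed state, cannot train.')
    # now: a warning is logged and training continues from checkpoints
    execution = model.train()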
dtlpy/repositories/apps.py CHANGED
@@ -272,15 +272,16 @@ class Apps:
 
          return app
 
-     def uninstall(self, app_id: str = None, app_name: str = None, wait: bool = True) -> bool:
+     def uninstall(self, app_id: str = None, app_name: str = None, wait: bool = True, app: entities.App = None) -> bool:
          """
          Delete an app entity.
 
          Note: You are required to add either app_id or app_name.
 
          :param str app_id: optional - the id of the app.
-         :param str app_name: optional - the name of the app.
+         :param str app_name: [DEPRECATED] - the name of the app.
          :param bool wait: optional - wait for the operation to finish.
+         :param entities.App app: optional - the app entity.
          :return whether we succeed uninstalling the specified app.
          :rtype bool
 
@@ -288,12 +289,12 @@ class Apps:
          .. code-block:: python
              # succeed = dl.apps.delete(app_id='app_id')
          """
-         if app_id is None and app_name is None:
+         if app is None and app_id is None:
              raise exceptions.PlatformException(
                  error='400',
                  message='You must provide an identifier in inputs')
-         if app_name is not None:
-             app = self.__get_by_name(app_name)
+
+         if app is not None:
              app_id = app.id
 
          success, response = self._client_api.gen_request(req_type='delete', path='/apps/{}'.format(app_id))
@@ -302,18 +303,16 @@ class Apps:
 
          try:
              app = self.get(app_id=app_id)
-         except Exception as e:
-             if e.status_code == '404':
-                 return success
-             else:
-                 raise e
-         if app.metadata:
+         except exceptions.NotFound:
+             return success
+
+         if wait and app.status == entities.CompositionStatus.TERMINATING and app.metadata is not None:
              command_id = app.metadata.get('system', {}).get('commands', {}).get('uninstall', None)
-             if wait and app.status == entities.CompositionStatus.TERMINATING and command_id is not None:
+             if command_id is not None:
                  command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
                  command.wait()
 
-         logger.debug(f"App deleted successfully (id: {app_id}, name: {app_name}")
+         logger.debug(f"App deleted successfully (id: {app.id}, name: {app.name}")
          return success
 
      def resume(self, app: entities.App = None, app_id: str = None) -> bool:
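`uninstall` now accepts the entity itself, `app_name` is deprecated, and name lookups are no longer performed inside the method, so an identifier must be an entity or an id. A minimal usage sketch, reusing the placeholder id from the docstring:

    import dtlpy as dl

    app = dl.apps.get(app_id='app_id')    # placeholder id
    succeed = dl.apps.uninstall(app=app)  # new: pass the entity directly
    # or equivalently by id:
    # succeed = dl.apps.uninstall(app_id='app_id')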