dtlpy 1.116.6__py3-none-any.whl → 1.118.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dtlpy/__init__.py CHANGED
@@ -108,7 +108,7 @@ from .entities import (
     # compute
     ClusterProvider, ComputeType, ComputeStatus, Toleration, DeploymentResource, DeploymentResources,
     NodePool, AuthenticationIntegration, Authentication, ComputeCluster, ComputeContext, Compute, KubernetesCompute,
-    ServiceDriver, ExportType, OutputExportType
+    ServiceDriver, ExportType, OutputExportType, DynamicConcurrencyUpdateMethod
 )
 from .ml import BaseModelAdapter
 from .utilities import Converter, BaseServiceRunner, Progress, Context, AnnotationFormat
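With this re-export, the enum becomes importable from the package root. A minimal sketch (assuming the usual `import dtlpy as dl` entry point):

    import dtlpy as dl

    # newly exposed alongside the other compute/service entities
    method = dl.DynamicConcurrencyUpdateMethod.SYNC
    print(method == 'sync')  # True; it is a str-based Enum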
dtlpy/__version__.py CHANGED
@@ -1 +1 @@
-version = '1.116.6'
+version = '1.118.12'
@@ -44,7 +44,7 @@ from .package_slot import PackageSlot, SlotPostAction, SlotPostActionType, SlotD
 from .package_function import PackageFunction, FunctionIO, PackageInputType
 from .time_series import TimeSeries
 from .service import Service, KubernetesAutoscalerType, KubernetesRabbitmqAutoscaler, KubernetesAutoscaler, KubernetesRPSAutoscaler, \
-    InstanceCatalog, KubernetesRuntime, ServiceType, ServiceModeType
+    InstanceCatalog, KubernetesRuntime, ServiceType, ServiceModeType, DynamicConcurrencyUpdateMethod
 from .execution import Execution, ExecutionStatus
 from .command import Command, CommandsStatus
 from .assignment import Assignment, Workload, WorkloadUnit
@@ -1827,7 +1827,7 @@ class FrameAnnotation(entities.BaseEntity):
         return frame

     @classmethod
-    def from_snapshot(cls, annotation, _json, fps):
+    def from_snapshot(cls, annotation, _json, fps=None):
         """
         new frame state to annotation

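`fps` is now optional, so a frame state can be restored from a snapshot without video frame-rate context. Hypothetical call (internal API; `annotation` and `snapshot_json` are assumed to exist):

    frame = dl.entities.FrameAnnotation.from_snapshot(annotation=annotation, _json=snapshot_json)  # fps defaults to None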
dtlpy/entities/app.py CHANGED
@@ -93,7 +93,7 @@ class App(entities.BaseEntity):
         .. code-block:: python
             succeed = app.uninstall()
         """
-        return self.apps.uninstall(self.id)
+        return self.apps.uninstall(app=self)

     def update(self):
         """
dtlpy/entities/compute.py CHANGED
@@ -13,6 +13,7 @@ class ClusterProvider(str, Enum):
     LOCAL = 'local'
     RANCHER_K3S = 'rancher-k3s'
     RANCHER_RKE = 'rancher-rke'
+    OPENSHIFT = 'openshift'


 class ComputeType(str, Enum):
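Since `ClusterProvider` subclasses `str`, the new member compares directly with its wire value. Sketch:

    from dtlpy.entities.compute import ClusterProvider

    provider = ClusterProvider.OPENSHIFT
    assert provider == 'openshift'  # str-based enum compares to its value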
dtlpy/entities/dataset.py CHANGED
@@ -86,6 +86,9 @@ class Dataset(entities.BaseEntity):
     # api
     _client_api = attr.ib(type=ApiClient, repr=False)

+    # syncing status
+    is_syncing = attr.ib(default=False, repr=False)
+
     # entities
     _project = attr.ib(default=None, repr=False)

@@ -183,6 +186,7 @@ class Dataset(entities.BaseEntity):
                    expiration_options=expiration_options,
                    index_driver=_json.get('indexDriver', None),
                    enable_sync_with_cloned=_json.get('enableSyncWithCloned', None),
+                   is_syncing=_json.get('isSyncing', False),
                    src_dataset=_json.get('srcDataset', None))
         inst.is_fetched = is_fetched
         return inst
@@ -215,6 +219,7 @@ class Dataset(entities.BaseEntity):
                                                 attr.fields(Dataset).items_count,
                                                 attr.fields(Dataset).index_driver,
                                                 attr.fields(Dataset).enable_sync_with_cloned,
+                                                attr.fields(Dataset).is_syncing,
                                                 attr.fields(Dataset).src_dataset,
                                                 ))
         _json.update({'items': self.items_url})
@@ -231,6 +236,7 @@ class Dataset(entities.BaseEntity):
             _json['expirationOptions'] = self.expiration_options.to_json()
         if self.enable_sync_with_cloned is not None:
             _json['enableSyncWithCloned'] = self.enable_sync_with_cloned
+        _json['isSyncing'] = self.is_syncing
         if self.src_dataset is not None:
             _json['srcDataset'] = self.src_dataset
         return _json
@@ -288,12 +294,15 @@ class Dataset(entities.BaseEntity):
     def set_repositories(self):
         reps = namedtuple('repositories',
                           field_names=['items', 'recipes', 'datasets', 'assignments', 'tasks', 'annotations',
-                                       'ontologies', 'features', 'settings', 'schema', 'collections'])
+                                       'ontologies', 'features', 'feature_sets', 'settings', 'schema', 'collections'])
+        _project_id = None
         if self._project is None:
             datasets = repositories.Datasets(client_api=self._client_api, project=self._project)
+            if self.projects is not None and len(self.projects) > 0:
+                _project_id = self.projects[0]
         else:
             datasets = self._project.datasets
-
+            _project_id = self._project.id
         return reps(
             items=repositories.Items(client_api=self._client_api, dataset=self, datasets=datasets),
             recipes=repositories.Recipes(client_api=self._client_api, dataset=self),
@@ -303,6 +312,7 @@ class Dataset(entities.BaseEntity):
             datasets=datasets,
             ontologies=repositories.Ontologies(client_api=self._client_api, dataset=self),
             features=repositories.Features(client_api=self._client_api, project=self._project, dataset=self),
+            feature_sets=repositories.FeatureSets(client_api=self._client_api, project=self._project, project_id=_project_id, dataset=self),
             settings=repositories.Settings(client_api=self._client_api, dataset=self),
             schema=repositories.Schema(client_api=self._client_api, dataset=self),
             collections=repositories.Collections(client_api=self._client_api, dataset=self)
@@ -353,6 +363,11 @@ class Dataset(entities.BaseEntity):
         assert isinstance(self._repositories.features, repositories.Features)
         return self._repositories.features

+    @property
+    def feature_sets(self):
+        assert isinstance(self._repositories.feature_sets, repositories.FeatureSets)
+        return self._repositories.feature_sets
+
     @property
     def collections(self):
         assert isinstance(self._repositories.collections, repositories.Collections)
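Net effect on the `Dataset` entity: an `isSyncing` flag now round-trips through `from_json`/`to_json`, and a dataset-scoped `feature_sets` repository is wired up with the best-available project id (the bound project, or `projects[0]` when the dataset is detached). Sketch (the dataset name is a placeholder):

    dataset = project.datasets.get(dataset_name='my-dataset')
    print(dataset.is_syncing)  # False once any clone/sync has settled

    feature_sets = dataset.feature_sets  # new repository property
    print(type(feature_sets).__name__)   # FeatureSets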
@@ -135,6 +135,13 @@ class FeatureSet(entities.BaseEntity):

         return _json

+    def update(self):
+        """
+        Update feature set
+
+        :return: entities.FeatureSet
+        """
+        return self.feature_sets.update(self)
     def delete(self):
         """
         Delete the feature set
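The new method mirrors `delete()` and simply delegates to the repository. Sketch (the top-level repository access and lookup argument are assumptions):

    feature_set = dl.feature_sets.get(feature_set_name='clip-embeddings')
    # ...mutate fields locally, then persist:
    feature_set = feature_set.update()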
dtlpy/entities/item.py CHANGED
@@ -80,6 +80,22 @@ class Item(entities.BaseEntity):
     def datasetId(self):
         return self.dataset_id

+    @property
+    def resolved_stream(self):
+        stream = self.metadata.get('system', dict()).get('shebang', dict()).get('linkInfo', dict()).get('ref', None)
+        if stream is None:
+            stream = self.stream
+            api_url = self._client_api.environment
+            if api_url != self._client_api.base_gate_url:
+                stream = stream.replace(api_url, self._client_api.base_gate_url)
+        else:
+            link_item_url_override = os.environ.get('LINK_ITEM_URL_OVERRIDE', None)
+            if link_item_url_override is not None:
+                src, target = link_item_url_override.split(',')
+                stream = stream.replace(src, target)
+
+        return stream
+
     @staticmethod
     def _protected_from_json(_json, client_api, dataset=None):
         """
dtlpy/entities/model.py CHANGED
@@ -423,7 +423,7 @@ class Model(entities.BaseEntity):
         # default
         if 'id_to_label_map' not in self.configuration:
             if not (self.dataset_id == 'null' or self.dataset_id is None):
-                self.labels = [label.tag for label in self.dataset.labels]
+                self.labels = [flat_key for flat_key, _ in self.dataset.labels_flat_dict.items()]
                 self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in enumerate(self.labels)}
         # use existing
         else:
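The default label list now comes from the dataset's flattened label dictionary, so nested ontology labels keep their full path instead of collapsing to the leaf tag. Illustrative comparison (the label names are hypothetical):

    # before: ['car', 'truck']                         (leaf tags only)
    # after:  ['vehicle', 'vehicle.car', 'vehicle.truck']
    labels = list(model.dataset.labels_flat_dict.keys())
    id_to_label_map = {idx: lbl for idx, lbl in enumerate(labels)}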
@@ -290,7 +290,7 @@ class Ontology(entities.BaseEntity):

     def update(self, system_metadata=False):
         """
-        Update items metadata
+        Update Ontology attribute

         :param bool system_metadata: bool - True, if you want to change metadata system
         :return: Ontology object
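The docstring fix only corrects what the method already does: it updates the ontology itself. Typical call (the label name is a placeholder):

    ontology.add_label(label_name='new-label')        # mutate locally
    ontology = ontology.update(system_metadata=True)  # persist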
@@ -9,7 +9,7 @@ from typing import Optional, List, Any
 import attr

 from .filters import FiltersOperations, FiltersOrderByDirection, FiltersResource
-from .. import miscellaneous
+from .. import miscellaneous, exceptions
 from ..services.api_client import ApiClient

 logger = logging.getLogger(name='dtlpy')
@@ -243,8 +243,12 @@ class PagedEntities:
         :param page_offset: page offset (for offset-based)
         :param page_size: page size
         """
-        items = self.return_page(page_offset=page_offset, page_size=page_size)
-        self.items = items
+        try:
+            items = self.return_page(page_offset=page_offset, page_size=page_size)
+            self.items = items
+        except exceptions.BadRequest as e:
+            logger.warning(f"BadRequest error received: {str(e)}")
+            self.items = miscellaneous.List(list())

     def next_page(self) -> None:
         """
dtlpy/entities/service.py CHANGED
@@ -11,6 +11,12 @@ from ..services.api_client import ApiClient

 logger = logging.getLogger(name='dtlpy')

+class DynamicConcurrencyUpdateMethod(str, Enum):
+    """ The method of updating the dynamic concurrency.
+    """
+    RESTART = 'restart',
+    SYNC = 'sync'
+

 class ServiceType(str, Enum):
     """ The type of the service (SYSTEM).
@@ -136,6 +142,7 @@ class KubernetesRuntime(ServiceRuntime):
                  num_replicas=DEFAULT_NUM_REPLICAS,
                  concurrency=DEFAULT_CONCURRENCY,
                  dynamic_concurrency=None,
+                 dynamic_concurrency_config=None,
                  runner_image=None,
                  autoscaler=None,
                  **kwargs):
@@ -149,6 +156,7 @@ class KubernetesRuntime(ServiceRuntime):
         self.single_agent = kwargs.get('singleAgent', None)
         self.preemptible = kwargs.get('preemptible', None)
         self.dynamic_concurrency = kwargs.get('dynamicConcurrency', dynamic_concurrency)
+        self.dynamic_concurrency_config = kwargs.get('dynamicConcurrencyConfig', dynamic_concurrency_config)

         self.autoscaler = kwargs.get('autoscaler', autoscaler)
         if self.autoscaler is not None and isinstance(self.autoscaler, dict):
@@ -183,6 +191,9 @@ class KubernetesRuntime(ServiceRuntime):
         if self.dynamic_concurrency is not None:
             _json['dynamicConcurrency'] = self.dynamic_concurrency

+        if self.dynamic_concurrency_config is not None:
+            _json['dynamicConcurrencyConfig'] = self.dynamic_concurrency_config
+
         return _json

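Putting the additions together: the config dict is stored as-is and serialized under `dynamicConcurrencyConfig`. Sketch (the config shape shown is an assumption, not documented by this diff):

    runtime = dl.KubernetesRuntime(
        concurrency=10,
        dynamic_concurrency=True,
        dynamic_concurrency_config={'updateMethod': dl.DynamicConcurrencyUpdateMethod.SYNC},  # hypothetical shape
    )
    print(runtime.to_json()['dynamicConcurrencyConfig'])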
@@ -368,14 +368,41 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
             annotation_filters.custom_filter['filter']['$and'].append({'metadata.system.model.name': {'$exists': False}})
         return annotation_filters

+    def __download_items(self, dataset, filters, local_path, annotation_options, annotation_filters=None):
+        """
+        Download items from dataset with optional annotation filters.
+
+        :param dataset: Dataset to download from
+        :param filters: Filters to apply
+        :param local_path: Local path to save files
+        :param annotation_options: Annotation download options
+        :param annotation_filters: Optional filters for annotations
+        :return: List of downloaded items
+        """
+        if annotation_options == entities.ViewAnnotationOptions.JSON:
+            downloader = repositories.Downloader(dataset.items)
+            return downloader._download_recursive(
+                local_path=local_path,
+                filters=filters,
+                annotation_filters=annotation_filters
+            )
+        else:
+            return dataset.items.download(
+                filters=filters,
+                local_path=local_path,
+                annotation_options=annotation_options,
+                annotation_filters=annotation_filters
+            )
+
     def __download_background_images(self, filters, data_subset_base_path, annotation_options):
         background_list = list()
         if self.configuration.get('include_background', False) is True:
             filters.custom_filter["filter"]["$and"].append({"annotated": False})
-            background_list = self.model_entity.dataset.items.download(
+            background_list = self.__download_items(
+                dataset=self.model_entity.dataset,
                 filters=filters,
                 local_path=data_subset_base_path,
-                annotation_options=annotation_options,
+                annotation_options=annotation_options
             )
         return background_list

@@ -434,49 +461,53 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         if subsets is None:
             raise ValueError("Model (id: {}) must have subsets in metadata.system.subsets".format(self.model_entity.id))
         for subset, filters_dict in subsets.items():
+            _filters_dict = filters_dict.copy()
             data_subset_base_path = os.path.join(data_path, subset)
             if os.path.isdir(data_subset_base_path) and not overwrite:
                 # existing and dont overwrite
                 self.logger.debug("Subset {!r} already exists (and overwrite=False). Skipping.".format(subset))
                 continue

-            filters = entities.Filters(custom_filter=filters_dict)
+            filters = entities.Filters(custom_filter=_filters_dict)
             self.logger.debug("Downloading subset {!r} of {}".format(subset, self.model_entity.dataset.name))

             annotation_filters = None
-            if subset in annotations_subsets:
-                annotation_filters = entities.Filters(
-                    use_defaults=False,
-                    resource=entities.FiltersResource.ANNOTATION,
-                    custom_filter=annotations_subsets[subset],
-                )
-            # if user provided annotation_filters, skip the default filters
-            elif self.model_entity.output_type is not None and self.model_entity.output_type != "embedding":
-                annotation_filters = entities.Filters(resource=entities.FiltersResource.ANNOTATION, use_defaults=False)
-                if self.model_entity.output_type in [
-                    entities.AnnotationType.SEGMENTATION,
-                    entities.AnnotationType.POLYGON,
-                ]:
-                    model_output_types = [entities.AnnotationType.SEGMENTATION, entities.AnnotationType.POLYGON]
-                else:
-                    model_output_types = [self.model_entity.output_type]
-
-                annotation_filters.add(
-                    field=entities.FiltersKnownFields.TYPE,
-                    values=model_output_types,
-                    operator=entities.FiltersOperations.IN,
-                )
+            if self.model_entity.output_type != "embedding":
+                if subset in annotations_subsets:
+                    annotation_filters = entities.Filters(
+                        use_defaults=False,
+                        resource=entities.FiltersResource.ANNOTATION,
+                        custom_filter=annotations_subsets[subset],
+                    )
+                # if user provided annotation_filters, skip the default filters
+                elif self.model_entity.output_type is not None:
+                    annotation_filters = entities.Filters(resource=entities.FiltersResource.ANNOTATION, use_defaults=False)
+                    if self.model_entity.output_type in [
+                        entities.AnnotationType.SEGMENTATION,
+                        entities.AnnotationType.POLYGON,
+                    ]:
+                        model_output_types = [entities.AnnotationType.SEGMENTATION, entities.AnnotationType.POLYGON]
+                    else:
+                        model_output_types = [self.model_entity.output_type]
+
+                    annotation_filters.add(
+                        field=entities.FiltersKnownFields.TYPE,
+                        values=model_output_types,
+                        operator=entities.FiltersOperations.IN,
+                    )

-            annotation_filters = self.__include_model_annotations(annotation_filters)
-            annotations_subsets[subset] = annotation_filters.prepare()
+                annotation_filters = self.__include_model_annotations(annotation_filters)
+                annotations_subsets[subset] = annotation_filters.prepare()

-            ret_list = dataset.items.download(
+            ret_list = self.__download_items(
+                dataset=dataset,
                 filters=filters,
                 local_path=data_subset_base_path,
                 annotation_options=annotation_options,
-                annotation_filters=annotation_filters,
+                annotation_filters=annotation_filters
             )
-            filters = entities.Filters(custom_filter=subsets[subset])
+            _filters_dict = subsets[subset].copy()
+            filters = entities.Filters(custom_filter=_filters_dict)
             background_ret_list = self.__download_background_images(
                 filters=filters,
                 data_subset_base_path=data_subset_base_path,
@@ -679,7 +710,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         valid_vectors = []
         items_to_upload = []
         vectors_to_upload = []
-
+
         for item, vector in zip(_items, vectors):
             # Check if vector is valid
             if vector is None or len(vector) != embeddings_size:
@@ -689,25 +720,25 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
             # Item and vector are valid
             valid_items.append(item)
             valid_vectors.append(vector)
-
+
             # Check if item should be skipped (prompt items)
             _system_metadata = getattr(item, 'system', dict())
             is_prompt = _system_metadata.get('shebang', dict()).get('dltype', '') == 'prompt'
             if skip_default_items and is_prompt:
                 self.logger.debug(f"Skipping feature upload for prompt item {item.id}")
                 continue
-
+
             # Items were not skipped - should be uploaded
             items_to_upload.append(item)
             vectors_to_upload.append(vector)
-
+
         # Update the original lists with valid items only
         _items[:] = valid_items
         vectors[:] = valid_vectors
-
+
         if len(_items) != len(vectors):
             raise ValueError(f"The number of items ({len(_items)}) is not equal to the number of vectors ({len(vectors)}).")
-
+
         self.logger.debug(f"Uploading {len(items_to_upload)} items' feature vectors for model {self.model_entity.name}.")
         try:
             start_time = time.time()
@@ -800,7 +831,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         logger.info("Received {s} for training".format(s=model.id))
         model = model.wait_for_model_ready()
         if model.status == 'failed':
-            raise ValueError("Model is in failed state, cannot train.")
+            logger.warning("Model failed. New training will attempt to resume from previous checkpoints.")

         ##############
         # Set status #
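The download-path changes are internal: subset filters are copied before being wrapped in `entities.Filters` (so preparing them cannot mutate `metadata.system.subsets`), JSON-only annotation downloads route through the recursive downloader, and embedding models skip annotation filters entirely. Declaring subsets on the model is unchanged (sketch, using the usual dtlpy subset pattern):

    model.metadata['system']['subsets'] = {
        'train': dl.Filters(field='dir', values='/train').prepare(),
        'validation': dl.Filters(field='dir', values='/validation').prepare(),
    }
    model.update(system_metadata=True)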
@@ -272,15 +272,16 @@ class Apps:

         return app

-    def uninstall(self, app_id: str = None, app_name: str = None, wait: bool = True) -> bool:
+    def uninstall(self, app_id: str = None, app_name: str = None, wait: bool = True, app: entities.App = None) -> bool:
         """
         Delete an app entity.

         Note: You are required to add either app_id or app_name.

         :param str app_id: optional - the id of the app.
-        :param str app_name: optional - the name of the app.
+        :param str app_name: [DEPRECATED] - the name of the app.
         :param bool wait: optional - wait for the operation to finish.
+        :param entities.App app: optional - the app entity.
         :return whether we succeed uninstalling the specified app.
         :rtype bool

@@ -288,12 +289,12 @@ class Apps:
         .. code-block:: python
             # succeed = dl.apps.delete(app_id='app_id')
         """
-        if app_id is None and app_name is None:
+        if app is None and app_id is None:
             raise exceptions.PlatformException(
                 error='400',
                 message='You must provide an identifier in inputs')
-        if app_name is not None:
-            app = self.__get_by_name(app_name)
+
+        if app is not None:
             app_id = app.id

         success, response = self._client_api.gen_request(req_type='delete', path='/apps/{}'.format(app_id))
@@ -302,18 +303,16 @@ class Apps:

         try:
             app = self.get(app_id=app_id)
-        except Exception as e:
-            if e.status_code == '404':
-                return success
-            else:
-                raise e
-        if app.metadata:
+        except exceptions.NotFound:
+            return success
+
+        if wait and app.status == entities.CompositionStatus.TERMINATING and app.metadata is not None:
             command_id = app.metadata.get('system', {}).get('commands', {}).get('uninstall', None)
-            if wait and app.status == entities.CompositionStatus.TERMINATING and command_id is not None:
+            if command_id is not None:
                 command = self.commands.get(command_id=command_id, url='api/v1/commands/faas/{}'.format(command_id))
                 command.wait()

-        logger.debug(f"App deleted successfully (id: {app_id}, name: {app_name}")
+        logger.debug(f"App deleted successfully (id: {app.id}, name: {app.name}")
         return success

     def resume(self, app: entities.App = None, app_id: str = None) -> bool:
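Callers can now pass the entity directly, and lookup by `app_name` is deprecated. Sketch (the identifier is a placeholder):

    app = project.apps.get(app_id='<app-id>')
    succeed = project.apps.uninstall(app=app, wait=True)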