dtlpy 1.100.5__py3-none-any.whl → 1.102.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. dtlpy/__version__.py +1 -1
  2. dtlpy/entities/app.py +2 -2
  3. dtlpy/entities/dataset.py +66 -0
  4. dtlpy/entities/dpk.py +1 -0
  5. dtlpy/entities/integration.py +4 -1
  6. dtlpy/entities/item.py +54 -0
  7. dtlpy/entities/model.py +2 -5
  8. dtlpy/entities/ontology.py +2 -2
  9. dtlpy/entities/package.py +2 -2
  10. dtlpy/entities/paged_entities.py +1 -1
  11. dtlpy/entities/pipeline.py +5 -4
  12. dtlpy/entities/pipeline_execution.py +2 -2
  13. dtlpy/entities/recipe.py +1 -1
  14. dtlpy/entities/service.py +10 -0
  15. dtlpy/entities/task.py +1 -1
  16. dtlpy/entities/trigger.py +4 -4
  17. dtlpy/ml/base_model_adapter.py +1 -1
  18. dtlpy/repositories/commands.py +1 -1
  19. dtlpy/repositories/datasets.py +107 -2
  20. dtlpy/repositories/downloader.py +2 -2
  21. dtlpy/repositories/dpks.py +14 -3
  22. dtlpy/repositories/models.py +12 -2
  23. dtlpy/repositories/services.py +35 -0
  24. dtlpy/repositories/uploader.py +1 -1
  25. dtlpy/services/api_client.py +93 -2
  26. dtlpy/utilities/converter.py +2 -2
  27. {dtlpy-1.100.5.dist-info → dtlpy-1.102.14.dist-info}/METADATA +1 -1
  28. {dtlpy-1.100.5.dist-info → dtlpy-1.102.14.dist-info}/RECORD +37 -37
  29. tests/assets/models_flow/main.py +4 -0
  30. tests/features/environment.py +20 -2
  31. {dtlpy-1.100.5.data → dtlpy-1.102.14.data}/scripts/dlp +0 -0
  32. {dtlpy-1.100.5.data → dtlpy-1.102.14.data}/scripts/dlp.bat +0 -0
  33. {dtlpy-1.100.5.data → dtlpy-1.102.14.data}/scripts/dlp.py +0 -0
  34. {dtlpy-1.100.5.dist-info → dtlpy-1.102.14.dist-info}/LICENSE +0 -0
  35. {dtlpy-1.100.5.dist-info → dtlpy-1.102.14.dist-info}/WHEEL +0 -0
  36. {dtlpy-1.100.5.dist-info → dtlpy-1.102.14.dist-info}/entry_points.txt +0 -0
  37. {dtlpy-1.100.5.dist-info → dtlpy-1.102.14.dist-info}/top_level.txt +0 -0
dtlpy/__version__.py CHANGED
@@ -1 +1 @@
- version = '1.100.5'
+ version = '1.102.14'
dtlpy/entities/app.py CHANGED
@@ -132,7 +132,7 @@ class App(entities.BaseEntity):
          return self.apps.pause(self)

      @staticmethod
-     def _protected_from_json(_json, client_api, project, is_fetched=True):
+     def _protected_from_json(_json, client_api, project=None, is_fetched=True):
          """
          Same as from_json but with try-except to catch if error

@@ -193,7 +193,7 @@ class App(entities.BaseEntity):
          return _json

      @classmethod
-     def from_json(cls, _json, client_api: ApiClient, project: entities.Project, is_fetched=True):
+     def from_json(cls, _json, client_api: ApiClient, project: entities.Project=None, is_fetched=True):
          app = cls(
              id=_json.get('id', None),
              name=_json.get('name', None),
dtlpy/entities/dataset.py CHANGED
@@ -1149,3 +1149,69 @@ class Dataset(entities.BaseEntity):
          ontology_id = recipe.ontology_ids[0]
          ontology = recipe.ontologies.get(ontology_id=ontology_id)
          return ontology.delete_attributes(ontology_id=ontology.id, keys=keys)
+
+     def split_ml_subsets(self,
+                          items_query=None,
+                          percentages: dict = None):
+         """
+         Split dataset items into ML subsets.
+
+         :param dl.Filters items_query: Filters object to select items.
+         :param dict percentages: {'train': x, 'validation': y, 'test': z}.
+         :return: True if the split operation was successful.
+         :rtype: bool
+         """
+         return self.datasets.split_ml_subsets(dataset_id=self.id,
+                                               items_query=items_query,
+                                               ml_split_list=percentages)
+
+     def assign_subset_to_items(self, subset: str, items_query=None) -> bool:
+         """
+         Assign a specific ML subset (train/validation/test) to items defined by the given filters.
+         This will set the chosen subset to True and the others to None.
+
+         :param str subset: 'train', 'validation', or 'test'
+         :param dl.Filters items_query: Filters to select items
+         :return: True if successful
+         :rtype: bool
+         """
+         return self.datasets.bulk_update_ml_subset(dataset_id=self.id,
+                                                    items_query=items_query,
+                                                    subset=subset)
+
+     def remove_subset_from_items(self, items_query=None) -> bool:
+         """
+         Remove any ML subset assignment from items defined by the given filters.
+         This sets the train, validation, and test tags to None.
+
+         :param dl.Filters items_query: Filters to select items
+         :return: True if successful
+         :rtype: bool
+         """
+         return self.datasets.bulk_update_ml_subset(dataset_id=self.id,
+                                                    items_query=items_query,
+                                                    subset=None,
+                                                    deleteTag=True)
+
+     def get_items_missing_ml_subset(self, filters=None) -> list:
+         """
+         Get the list of item IDs that are missing an ML subset assignment.
+         An item is considered missing an ML subset if its train, validation, and test tags are all None.
+
+         :param dl.Filters filters: optional filters to narrow down items. If None, a default filter for files is used.
+         :return: list of item IDs
+         :rtype: list
+         """
+         if filters is None:
+             filters = entities.Filters()
+             filters.add(field='metadata.system.tags.train', values=None)
+             filters.add(field='metadata.system.tags.validation', values=None)
+             filters.add(field='metadata.system.tags.test', values=None)
+         missing_ids = []
+         pages = self.items.list(filters=filters)
+         for page in pages:
+             for item in page:
+                 # items that pass the filters have no subset assigned
+                 missing_ids.append(item.id)
+         return missing_ids
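
Usage sketch for the new dataset-level ML-subset API (the dataset ID, filter, and percentages below are illustrative, not taken from the diff):

    import dtlpy as dl

    dataset = dl.datasets.get(dataset_id='<dataset-id>')
    # split all items 70/15/15 across train/validation/test
    dataset.split_ml_subsets(percentages={'train': 70, 'validation': 15, 'test': 15})
    # force every JPEG item into the test subset
    filters = dl.Filters(field='metadata.system.mimetype', values='image/jpeg')
    dataset.assign_subset_to_items(subset='test', items_query=filters)
    # list item IDs that still have no subset assigned
    print(dataset.get_items_missing_ml_subset())
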
dtlpy/entities/dpk.py CHANGED
@@ -48,6 +48,7 @@ class Toolbar(entities.DlEntity):
      invoke: dict = entities.DlProperty(location=['invoke'], _type=dict)
      location: str = entities.DlProperty(location=['location'], _type=str)
      icon: str = entities.DlProperty(location=['icon'], _type=str)
+     action: str = entities.DlProperty(location=['action'], _type=str, default=None)

      def to_json(self) -> dict:
          return self._dict.copy()
dtlpy/entities/integration.py CHANGED
@@ -34,6 +34,8 @@ class IntegrationType(str, Enum):
        - KEY VALUE Integration - for save secrets in the platform
      * - GCP_WORKLOAD_IDENTITY_FEDERATION
        - GCP Workload Identity Federation Integration - for GCP drivers
+     * - PRIVATE_REGISTRY
+       - PRIVATE REGISTRY Integration - for private registry drivers
      """
      S3 = "s3"
      AWS_CROSS_ACCOUNT = 'aws-cross'
@@ -42,7 +44,8 @@ class IntegrationType(str, Enum):
      GCS_CROSS = "gcp-cross"
      AZUREBLOB = "azureblob"
      KEY_VALUE = "key_value"
-     GCP_WORKLOAD_IDENTITY_FEDERATION = "gcp-workload-identity-federation"
+     GCP_WORKLOAD_IDENTITY_FEDERATION = "gcp-workload-identity-federation",
+     PRIVATE_REGISTRY = "private-registry"


  @attr.s
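
The new PRIVATE_REGISTRY member can be passed wherever an IntegrationType is accepted, e.g. when creating an integration for a private registry driver. A hedged sketch (the integration name and the options payload are assumptions for illustration; the exact options schema is not part of this diff):

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')
    integration = project.integrations.create(
        integrations_type=dl.IntegrationType.PRIVATE_REGISTRY,
        name='my-private-registry',
        options={'user': '<username>', 'password': '<password>'}  # assumed schema
    )
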
dtlpy/entities/item.py CHANGED
@@ -716,6 +716,60 @@ class Item(entities.BaseEntity):
          self._platform_dict = self.update()._platform_dict
          return self

+     def assign_subset(self, subset: str):
+         """
+         Assign a single ML subset (train/validation/test) to this item.
+         Sets the chosen subset to True and the others to None,
+         then calls item.update(system_metadata=True).
+
+         :param str subset: 'train', 'validation', or 'test'
+         """
+         if subset not in ['train', 'validation', 'test']:
+             raise ValueError("subset must be one of: 'train', 'validation', 'test'")
+
+         if 'system' not in self.metadata:
+             self.metadata['system'] = {}
+         if 'tags' not in self.metadata['system']:
+             self.metadata['system']['tags'] = {}
+
+         self.metadata['system']['tags']['train'] = True if subset == 'train' else None
+         self.metadata['system']['tags']['validation'] = True if subset == 'validation' else None
+         self.metadata['system']['tags']['test'] = True if subset == 'test' else None
+
+         return self.update(system_metadata=True)
+
+     def remove_subset(self):
+         """
+         Remove any ML subset assignment from this item.
+         Sets train, validation, and test to None,
+         then calls item.update(system_metadata=True).
+         """
+         if 'system' not in self.metadata:
+             self.metadata['system'] = {}
+         if 'tags' not in self.metadata['system']:
+             self.metadata['system']['tags'] = {}
+
+         self.metadata['system']['tags']['train'] = None
+         self.metadata['system']['tags']['validation'] = None
+         self.metadata['system']['tags']['test'] = None
+
+         return self.update(system_metadata=True)
+
+     def get_current_subset(self) -> str:
+         """
+         Get the current ML subset assignment of this item.
+         Returns 'train', 'validation', 'test', or None if not assigned.
+
+         :return: subset name or None
+         :rtype: str or None
+         """
+         tags = self.metadata.get('system', {}).get('tags', {})
+         for subset in ['train', 'validation', 'test']:
+             if tags.get(subset) is True:
+                 return subset
+         return None

  class ModalityTypeEnum(str, Enum):
      """
dtlpy/entities/model.py CHANGED
@@ -1,11 +1,8 @@
- import json
  from collections import namedtuple
  from enum import Enum
  import traceback
  import logging
-
  import attr
-
  from .. import repositories, entities
  from ..services.api_client import ApiClient

@@ -122,7 +119,7 @@ class Model(entities.BaseEntity):
      app = attr.ib(default=None)

      @staticmethod
-     def _protected_from_json(_json, client_api, project, package, is_fetched=True):
+     def _protected_from_json(_json, client_api, project=None, package=None, is_fetched=True):
          """
          Same as from_json but with try-except to catch if error

@@ -146,7 +143,7 @@ class Model(entities.BaseEntity):
          return status, model

      @classmethod
-     def from_json(cls, _json, client_api, project, package, is_fetched=True):
+     def from_json(cls, _json, client_api, project=None, package=None, is_fetched=True):
          """
          Turn platform representation of model into a model entity

dtlpy/entities/ontology.py CHANGED
@@ -185,7 +185,7 @@ class Ontology(entities.BaseEntity):
          self._color_map = values

      @staticmethod
-     def _protected_from_json(_json, client_api, recipe, dataset, project, is_fetched=True):
+     def _protected_from_json(_json, client_api, recipe=None, dataset=None, project=None, is_fetched=True):
          """
          Same as from_json but with try-except to catch if error
          :param _json: platform json
@@ -217,7 +217,7 @@ class Ontology(entities.BaseEntity):
          return True

      @classmethod
-     def from_json(cls, _json, client_api, recipe, dataset=None, project=None, is_fetched=True):
+     def from_json(cls, _json, client_api, recipe=None, dataset=None, project=None, is_fetched=True):
          """
          Build an Ontology entity object from a json

dtlpy/entities/package.py CHANGED
@@ -124,7 +124,7 @@ class Package(entities.DlEntity):
          self.codebase = entities.ItemCodebase(item_id=item_id)

      @staticmethod
-     def _protected_from_json(_json, client_api, project, is_fetched=True):
+     def _protected_from_json(_json, client_api, project=None, is_fetched=True):
          """
          Same as from_json but with try-except to catch if error

@@ -144,7 +144,7 @@ class Package(entities.DlEntity):
          return status, package

      @classmethod
-     def from_json(cls, _json, client_api, project, is_fetched=True):
+     def from_json(cls, _json, client_api, project=None, is_fetched=True):
          """
          Turn platform representation of package into a package entity

dtlpy/entities/paged_entities.py CHANGED
@@ -71,7 +71,7 @@ class PagedEntities:
          return self.items_count

      def __iter__(self):
-         pbar = tqdm.tqdm(total=self.total_pages_count, disable=self._client_api.verbose.disable_progress_bar,
+         pbar = tqdm.tqdm(total=self.total_pages_count, disable=self._client_api.verbose.disable_progress_bar_iterate_pages,
                           file=sys.stdout, desc="Iterate Pages")
          if self.page_offset != 0:
              # reset the count for page 0
dtlpy/entities/pipeline.py CHANGED
@@ -232,11 +232,12 @@ class Pipeline(entities.BaseEntity):
      updated_by = attr.ib(default=None)

      @staticmethod
-     def _protected_from_json(_json, client_api, project, is_fetched=True):
+     def _protected_from_json(_json, client_api, project=None, is_fetched=True):
          """
          Same as from_json but with try-except to catch if error
          :param _json: platform json
          :param client_api: ApiClient entity
+         :param dtlpy.entities.project.Project project: entity
          :param is_fetched: is Entity fetched from Platform
          :return:
          """
@@ -254,13 +255,13 @@ class Pipeline(entities.BaseEntity):
          return status, pipeline

      @classmethod
-     def from_json(cls, _json, client_api, project, is_fetched=True):
+     def from_json(cls, _json, client_api, project=None, is_fetched=True):
          """
          Turn platform representation of pipeline into a pipeline entity

          :param dict _json: platform representation of package
          :param dl.ApiClient client_api: ApiClient entity
-         :param dtlpy.entities.project.Project project: project entity
+         :param dtlpy.entities.project.Project project: entity
          :param bool is_fetched: is Entity fetched from Platform
          :return: Pipeline entity
          :rtype: dtlpy.entities.pipeline.Pipeline
@@ -308,7 +309,7 @@ class Pipeline(entities.BaseEntity):
          return inst

      @classmethod
-     def pipeline_node(self, _json):
+     def pipeline_node(cls, _json):
          node_type = _json.get('type')
          if node_type == 'task':
              return TaskNode.from_json(_json)
dtlpy/entities/pipeline_execution.py CHANGED
@@ -91,7 +91,7 @@ class PipelineExecution(entities.BaseEntity):
      _repositories = attr.ib(repr=False)

      @staticmethod
-     def _protected_from_json(_json, client_api, pipeline, is_fetched=True):
+     def _protected_from_json(_json, client_api, pipeline=None, is_fetched=True):
          """
          Same as from_json but with try-except to catch if error
          :param _json: platform json
@@ -114,7 +114,7 @@ class PipelineExecution(entities.BaseEntity):
          return status, pipeline

      @classmethod
-     def from_json(cls, _json, client_api, pipeline, is_fetched=True) -> 'PipelineExecution':
+     def from_json(cls, _json, client_api, pipeline=None, is_fetched=True) -> 'PipelineExecution':
          """
          Turn platform representation of pipeline_execution into a pipeline_execution entity

dtlpy/entities/recipe.py CHANGED
@@ -57,7 +57,7 @@ class Recipe(entities.BaseEntity):
          Build a Recipe entity object from a json

          :param dict _json: _json response from host
-         :param dtlpy.entities.dataset.Dataset Dataset: Dataset entity
+         :param dtlpy.entities.dataset.Dataset dataset: Dataset entity
          :param dtlpy.entities.project.Project project: project entity
          :param dl.ApiClient client_api: ApiClient entity
          :param bool is_fetched: is Entity fetched from Platform
dtlpy/entities/service.py CHANGED
@@ -839,6 +839,16 @@ class Service(entities.BaseEntity):
              **kwargs
          )

+     def restart(self, replica_name: str = None):
+         """
+         Restart service
+
+         :param str replica_name: replica name
+         :return: True
+         :rtype: bool
+         """
+         return self.services.restart(service=self, replica_name=replica_name)
+

  class KubernetesAutoscalerType(str, Enum):
      """ The Service Autoscaler Type (RABBITMQ, CPU).
dtlpy/entities/task.py CHANGED
@@ -116,7 +116,7 @@ class Task:
          self._description['content'] = description

      @staticmethod
-     def _protected_from_json(_json, client_api, project, dataset):
+     def _protected_from_json(_json, client_api, project=None, dataset=None):
          """
          Same as from_json but with try-except to catch if error

dtlpy/entities/trigger.py CHANGED
@@ -119,7 +119,7 @@ class BaseTrigger(entities.BaseEntity):
          return self.updated_at

      @staticmethod
-     def _protected_from_json(_json, client_api, project, service=None):
+     def _protected_from_json(_json, client_api, project=None, service=None):
          """
          Same as from_json but with try-except to catch if error

@@ -141,7 +141,7 @@ class BaseTrigger(entities.BaseEntity):
          return status, trigger

      @classmethod
-     def from_json(cls, _json, client_api, project, service=None):
+     def from_json(cls, _json, client_api, project=None, service=None):
          """
          Build a trigger entity object from a json

@@ -305,7 +305,7 @@ class Trigger(BaseTrigger):
          return _json

      @classmethod
-     def from_json(cls, _json, client_api, project, service=None):
+     def from_json(cls, _json, client_api, project=None, service=None):
          """
          Build a trigger entity object from a json

@@ -377,7 +377,7 @@ class CronTrigger(BaseTrigger):
          return _json

      @classmethod
-     def from_json(cls, _json, client_api, project, service=None):
+     def from_json(cls, _json, client_api, project=None, service=None):
          """
          Build a trigger entity object from a json

dtlpy/ml/base_model_adapter.py CHANGED
@@ -486,7 +486,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
          if feature_set is None:
              logger.info('Feature Set not found. creating... ')
              try:
-                 self.model_entity.project.feature_sets.get(name=self.model_entity.name)
+                 self.model_entity.project.feature_sets.get(feature_set_name=self.model_entity.name)
                  feature_set_name = f"{self.model_entity.name}-{''.join(random.choices(string.ascii_letters + string.digits, k=5))}"
                  logger.warning(f"Feature set with the model name already exists. Creating new feature set with name {feature_set_name}")
              except exceptions.NotFound:
dtlpy/repositories/commands.py CHANGED
@@ -95,7 +95,7 @@ class Commands:

          command = None
          pbar = tqdm.tqdm(total=100,
-                          disable=self._client_api.verbose.disable_progress_bar,
+                          disable=self._client_api.verbose.disable_progress_bar_command_progress,
                           file=sys.stdout,
                           desc='Command Progress')
          num_tries = 1
dtlpy/repositories/datasets.py CHANGED
@@ -1008,7 +1008,7 @@ class Datasets:
          pool = dataset._client_api.thread_pools(pool_name='dataset.download')
          jobs = [None for _ in range(pages.items_count)]
          progress = tqdm.tqdm(total=pages.items_count,
-                              disable=dataset._client_api.verbose.disable_progress_bar,
+                              disable=dataset._client_api.verbose.disable_progress_bar_download_annotations,
                               file=sys.stdout, desc='Download Annotations')
          i_item = 0
          for page in pages:
@@ -1081,7 +1081,7 @@ class Datasets:
          filters._user_query = 'false'
          pages = dataset.items.list(filters=filters)
          total_items = pages.items_count
-         pbar = tqdm.tqdm(total=total_items, disable=dataset._client_api.verbose.disable_progress_bar,
+         pbar = tqdm.tqdm(total=total_items, disable=dataset._client_api.verbose.disable_progress_bar_upload_annotations,
                           file=sys.stdout, desc='Upload Annotations')
          pool = self._client_api.thread_pools('annotation.upload')
          annotations_uploaded_count = 0
@@ -1126,3 +1126,108 @@ class Datasets:
          """
          import warnings
          warnings.warn("`readonly` flag on dataset is deprecated, doing nothing.", DeprecationWarning)
+
+     @_api_reference.add(path='/datasets/{id}/split', method='post')
+     def split_ml_subsets(self,
+                          dataset_id: str,
+                          items_query: entities.Filters,
+                          ml_split_list: dict) -> bool:
+         """
+         Split dataset items into ML subsets.
+
+         :param str dataset_id: The ID of the dataset.
+         :param dl.Filters items_query: Query to select items.
+         :param dict ml_split_list: Dictionary with 'train', 'validation', 'test' keys and integer percentages.
+         :return: True if the split operation was successful.
+         :rtype: bool
+         :raises: PlatformException on failure; ValueError if the percentages do not sum to 100 or keys/values are invalid.
+         """
+         # Validate percentages
+         if not ml_split_list:
+             ml_split_list = {'train': 80, 'validation': 10, 'test': 10}
+
+         if not items_query:
+             items_query = entities.Filters()
+
+         items_query_dict = items_query.prepare()
+         required_keys = {'train', 'validation', 'test'}
+         if set(ml_split_list.keys()) != required_keys:
+             raise ValueError("MLSplitList must have exactly the keys 'train', 'validation', 'test'.")
+         total = sum(ml_split_list.values())
+         if total != 100:
+             raise ValueError(
+                 "Please set the Train, Validation, and Test subsets percentages to add up to 100%. "
+                 "For example: 70, 15, 15."
+             )
+         for key, value in ml_split_list.items():
+             if not isinstance(value, int) or value < 0:
+                 raise ValueError("Percentages must be integers >= 0.")
+         payload = {
+             'itemsQuery': items_query_dict,
+             'MLSplitList': ml_split_list
+         }
+         path = f'/datasets/{dataset_id}/split'
+         success, response = self._client_api.gen_request(req_type='post',
+                                                          path=path,
+                                                          json_req=payload)
+         if success:
+             # Wait for the split operation to complete
+             command = entities.Command.from_json(_json=response.json(),
+                                                  client_api=self._client_api)
+             command.wait()
+             return True
+         else:
+             raise exceptions.PlatformException(response)
+
+     @_api_reference.add(path='/datasets/{id}/items/bulk-update-metadata', method='post')
+     def bulk_update_ml_subset(self, dataset_id: str, items_query: entities.Filters, subset: str = None, deleteTag: bool = False) -> bool:
+         """
+         Bulk update the ML subset assignment for the selected items.
+         If subset is None, remove subsets. Otherwise, assign the specified subset.
+
+         :param str dataset_id: ID of the dataset
+         :param dl.Filters items_query: filters for selecting items
+         :param str subset: 'train', 'validation', 'test' or None to remove all
+         :param bool deleteTag: when True, clear the subset tags instead of assigning one
+         :return: True if success
+         :rtype: bool
+         """
+         if items_query is None:
+             items_query = entities.Filters()
+         items_query_dict = items_query.prepare()
+         if not deleteTag and subset not in ['train', 'validation', 'test']:
+             raise ValueError("subset must be one of: 'train', 'validation', 'test'")
+         # Determine tag values based on subset
+         tags = {
+             'train': True if subset == 'train' else None,
+             'validation': True if subset == 'validation' else None,
+             'test': True if subset == 'test' else None
+         }
+
+         payload = {
+             "query": items_query_dict,
+             "updateQuery": {
+                 "update": {
+                     "metadata": {
+                         "system": {
+                             "tags": tags
+                         }
+                     }
+                 },
+                 "systemSpace": True
+             }
+         }
+
+         success, response = self._client_api.gen_request(
+             req_type='post',
+             path=f'/datasets/{dataset_id}/items/bulk-update-metadata',
+             json_req=payload
+         )
+         if success:
+             # Like the split operation, the endpoint returns a command to wait on
+             command = entities.Command.from_json(_json=response.json(), client_api=self._client_api)
+             command.wait()
+             return True
+         else:
+             raise exceptions.PlatformException(response)
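
The repository methods can also be called directly when only a dataset ID is at hand. A minimal sketch (the dataset ID and directory filter are illustrative):

    import dtlpy as dl

    # 60/20/20 split over items in /batch-1
    filters = dl.Filters(field='dir', values='/batch-1')
    dl.datasets.split_ml_subsets(dataset_id='<dataset-id>',
                                 items_query=filters,
                                 ml_split_list={'train': 60, 'validation': 20, 'test': 20})
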
dtlpy/repositories/downloader.py CHANGED
@@ -211,7 +211,7 @@ class Downloader:
          # pool
          pool = client_api.thread_pools(pool_name='item.download')
          # download
-         pbar = tqdm.tqdm(total=num_items, disable=client_api.verbose.disable_progress_bar, file=sys.stdout,
+         pbar = tqdm.tqdm(total=num_items, disable=client_api.verbose.disable_progress_bar_download_dataset, file=sys.stdout,
                           desc='Download Items')
          try:
              i_item = 0
@@ -699,7 +699,7 @@ class Downloader:
                  unit_divisor=1024,
                  position=1,
                  file=sys.stdout,
-                 disable=self.items_repository._client_api.verbose.disable_progress_bar,
+                 disable=self.items_repository._client_api.verbose.disable_progress_bar_download_item,
                  desc='Download Item')
          except Exception as err:
              one_file_progress_bar = False
dtlpy/repositories/dpks.py CHANGED
@@ -196,8 +196,13 @@ class Dpks:
          dpk = dpk_v.items[0]
          return dpk

-     def publish(self, dpk: entities.Dpk = None, ignore_max_file_size: bool = False,
-                 manifest_filepath='dataloop.json') -> entities.Dpk:
+     def publish(
+             self,
+             dpk: entities.Dpk = None,
+             ignore_max_file_size: bool = False,
+             manifest_filepath='dataloop.json',
+             local_path: str = None
+     ) -> entities.Dpk:
          """
          Upload a dpk entity to the dataloop platform.

@@ -207,6 +212,7 @@ class Dpks:
              during the packaging of the codebase.
          :param str manifest_filepath: Optional. Path to the manifest file. Can be absolute or relative.
              Defaults to 'dataloop.json'
+         :param str local_path: Optional. The path where the dpk files are located.

          :return the published dpk
          :rtype dl.entities.Dpk
@@ -241,8 +247,13 @@ class Dpks:
          if self.project and self.project.id != dpk.context['project']:
              logger.warning("the project id that provide different from the dpk project id")

+         if local_path is None:
+             if manifest_filepath == 'dataloop.json':
+                 local_path = os.getcwd()
+             else:
+                 local_path = os.path.dirname(manifest_filepath)
          if dpk.codebase is None:
-             dpk.codebase = self.project.codebases.pack(directory=os.getcwd(),
+             dpk.codebase = self.project.codebases.pack(directory=local_path,
                                                         name=dpk.display_name,
                                                         extension='dpk',
                                                         ignore_directories=['artifacts'],
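
With the new local_path argument, a DPK can be packaged from a directory other than the current working one. A minimal sketch (project name and paths are illustrative):

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')
    # package the app sources from /apps/my-app using its manifest
    dpk = project.dpks.publish(manifest_filepath='/apps/my-app/dataloop.json',
                               local_path='/apps/my-app')
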
dtlpy/repositories/models.py CHANGED
@@ -674,7 +674,12 @@ class Models:
              raise ValueError("Model doesnt have any associated services. Need to deploy before predicting")
          if item_ids is None and dataset_id is None:
              raise ValueError("Need to provide either item_ids or dataset_id")
-         payload = {'input': {'itemIds': item_ids, 'datasetId': dataset_id},
+         payload_input = {}
+         if item_ids is not None:
+             payload_input['itemIds'] = item_ids
+         if dataset_id is not None:
+             payload_input['datasetId'] = dataset_id
+         payload = {'input': payload_input,
                     'config': {'serviceId': model.metadata['system']['deploy']['services'][0]}}

          success, response = self._client_api.gen_request(req_type="post",
@@ -701,7 +706,12 @@ class Models:
              raise ValueError("Model doesnt have any associated services. Need to deploy before predicting")
          if item_ids is None and dataset_id is None:
              raise ValueError("Need to provide either item_ids or dataset_id")
-         payload = {'input': {'itemIds': item_ids, 'datasetId': dataset_id},
+         payload_input = {}
+         if item_ids is not None:
+             payload_input['itemIds'] = item_ids
+         if dataset_id is not None:
+             payload_input['datasetId'] = dataset_id
+         payload = {'input': payload_input,
                     'config': {'serviceId': model.metadata['system']['deploy']['services'][0]}}

          success, response = self._client_api.gen_request(req_type="post",
dtlpy/repositories/services.py CHANGED
@@ -1589,6 +1589,40 @@ class Services:

          return True

+     def restart(self, service: entities.Service, replica_name: str = None):
+         """
+         Restart a service replica
+
+         **Prerequisites**: You must be in the role of a *developer*.
+
+         :param dtlpy.entities.service.Service service: service entity
+         :param str replica_name: replica name
+         :return: True
+         :rtype: bool
+
+         **Example**:
+
+         .. code-block:: python
+
+             is_restarted = dl.services.restart(service=service_entity,
+                                                replica_name='replica_name')
+         """
+         payload = {}
+
+         if replica_name is not None:
+             payload['replicaName'] = replica_name
+
+         # request
+         success, response = self._client_api.gen_request(req_type='post',
+                                                          path='/services/{}/restart'.format(service.id),
+                                                          json_req=payload)
+
+         # exception handling
+         if not success:
+             raise exceptions.PlatformException(response)
+
+         return True
+

  class ServiceLog:
      """
@@ -1661,3 +1695,4 @@ class ServiceLog:
          for log in self.logs:
              yield '{}: {}'.format(log.get('timestamp', self.start), log.get('message', '').strip())
          self.get_next_log()
+
dtlpy/repositories/uploader.py CHANGED
@@ -38,7 +38,7 @@ class Uploader:
          self.num_files = 0
          self.i_item = 0
          self.pbar = tqdm.tqdm(total=0,
-                               disable=self.items_repository._client_api.verbose.disable_progress_bar,
+                               disable=self.items_repository._client_api.verbose.disable_progress_bar_upload_items,
                               file=sys.stdout, desc='Upload Items')
          self.reporter = Reporter(num_workers=0,
                                   resource=Reporter.ITEMS_UPLOAD,
dtlpy/services/api_client.py CHANGED
@@ -104,6 +104,16 @@ class Verbose:
      __DEFAULT_DISABLE_PROGRESS_BAR = False
      __DEFAULT_PRINT_ALL_RESPONSES = False
      __PRINT_ERROR_LOGS = False
+     __DEFAULT_PROGRESS_BAR_SETTINGS = {
+         'Iterate Pages': False,
+         'Command Progress': False,
+         'Download Dataset': False,
+         'Download Item': False,
+         'Upload Items': False,
+         'Download Annotations': False,
+         'Upload Annotations': False,
+         'Convert Annotations': False
+     }

      def __init__(self, cookie):
          self.cookie = cookie
@@ -115,6 +125,7 @@ class Verbose:
          self._disable_progress_bar = self.__DEFAULT_DISABLE_PROGRESS_BAR
          self._print_all_responses = self.__DEFAULT_PRINT_ALL_RESPONSES
          self._print_error_logs = self.__PRINT_ERROR_LOGS
+         self._progress_bar_settings = self.__DEFAULT_PROGRESS_BAR_SETTINGS
          if os.getenv('DTLPY_REFRESH_TOKEN_METHOD', "") == "proxy":
              self._print_error_logs = True
          self.to_cookie()
@@ -123,7 +134,9 @@ class Verbose:
          dictionary = {'logging_level': self._logging_level,
                        'disable_progress_bar': self._disable_progress_bar,
                        'print_all_responses': self._print_all_responses,
-                       'print_error_logs': self._print_error_logs}
+                       'print_error_logs': self._print_error_logs,
+                       'progress_bar_setting': json.dumps(self._progress_bar_settings)
+                       }
          self.cookie.put(key='verbose', value=dictionary)

      def from_cookie(self, dictionary):
@@ -131,6 +144,83 @@ class Verbose:
          self._disable_progress_bar = dictionary.get('disable_progress_bar', self.__DEFAULT_DISABLE_PROGRESS_BAR)
          self._print_all_responses = dictionary.get('print_all_responses', self.__DEFAULT_PRINT_ALL_RESPONSES)
          self._print_error_logs = dictionary.get('print_error_logs', self.__PRINT_ERROR_LOGS)
+         progress_bar_settings = dictionary.get('progress_bar_setting', None)
+         if progress_bar_settings is None:
+             self._progress_bar_settings = self.__DEFAULT_PROGRESS_BAR_SETTINGS
+         else:
+             self._progress_bar_settings = json.loads(progress_bar_settings)
+
+     @property
+     def disable_progress_bar_iterate_pages(self):
+         return self._disable_progress_bar or self._progress_bar_settings.get('Iterate Pages', False)
+
+     @disable_progress_bar_iterate_pages.setter
+     def disable_progress_bar_iterate_pages(self, val):
+         self._progress_bar_settings['Iterate Pages'] = val
+         self.to_cookie()
+
+     @property
+     def disable_progress_bar_command_progress(self):
+         return self._disable_progress_bar or self._progress_bar_settings.get('Command Progress', False)
+
+     @disable_progress_bar_command_progress.setter
+     def disable_progress_bar_command_progress(self, val):
+         self._progress_bar_settings['Command Progress'] = val
+         self.to_cookie()
+
+     @property
+     def disable_progress_bar_download_item(self):
+         return self._disable_progress_bar or self._progress_bar_settings.get('Download Item', False)
+
+     @disable_progress_bar_download_item.setter
+     def disable_progress_bar_download_item(self, val):
+         self._progress_bar_settings['Download Item'] = val
+         self.to_cookie()
+
+     @property
+     def disable_progress_bar_download_dataset(self):
+         return self._disable_progress_bar or self._progress_bar_settings.get('Download Dataset', False)
+
+     @disable_progress_bar_download_dataset.setter
+     def disable_progress_bar_download_dataset(self, val):
+         self._progress_bar_settings['Download Dataset'] = val
+         self.to_cookie()
+
+     @property
+     def disable_progress_bar_upload_items(self):
+         return self._disable_progress_bar or self._progress_bar_settings.get('Upload Items', False)
+
+     @disable_progress_bar_upload_items.setter
+     def disable_progress_bar_upload_items(self, val):
+         self._progress_bar_settings['Upload Items'] = val
+         self.to_cookie()
+
+     @property
+     def disable_progress_bar_download_annotations(self):
+         return self._disable_progress_bar or self._progress_bar_settings.get('Download Annotations', False)
+
+     @disable_progress_bar_download_annotations.setter
+     def disable_progress_bar_download_annotations(self, val):
+         self._progress_bar_settings['Download Annotations'] = val
+         self.to_cookie()
+
+     @property
+     def disable_progress_bar_upload_annotations(self):
+         return self._disable_progress_bar or self._progress_bar_settings.get('Upload Annotations', False)
+
+     @disable_progress_bar_upload_annotations.setter
+     def disable_progress_bar_upload_annotations(self, val):
+         self._progress_bar_settings['Upload Annotations'] = val
+         self.to_cookie()
+
+     @property
+     def disable_progress_bar_convert_annotations(self):
+         return self._disable_progress_bar or self._progress_bar_settings.get('Convert Annotations', False)
+
+     @disable_progress_bar_convert_annotations.setter
+     def disable_progress_bar_convert_annotations(self, val):
+         self._progress_bar_settings['Convert Annotations'] = val
+         self.to_cookie()

      @property
      def disable_progress_bar(self):
@@ -1073,6 +1163,7 @@ class ApiClient:
                  headers=headers_req,
                  chunked=stream,
                  retry_attempts=5,
+                 ssl=self.verify,
                  retry_exceptions={aiohttp.client_exceptions.ClientOSError,
                                    aiohttp.client_exceptions.ServerDisconnectedError,
                                    aiohttp.client_exceptions.ClientPayloadError},
@@ -1161,7 +1252,7 @@ class ApiClient:
                  unit_divisor=1024,
                  position=1,
                  file=sys.stdout,
-                 disable=self.verbose.disable_progress_bar,
+                 disable=self.verbose.disable_progress_bar_upload_items,
                  desc='Upload Items')

          def callback(bytes_read):
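
Each progress bar can now be silenced individually instead of only via the global flag. A minimal sketch (assuming the usual dl.verbose accessor):

    import dtlpy as dl

    # hide only the page-iteration bar; upload/download bars stay visible
    dl.verbose.disable_progress_bar_iterate_pages = True
    # the global switch still disables every bar at once
    dl.verbose.disable_progress_bar = True
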
dtlpy/utilities/converter.py CHANGED
@@ -209,7 +209,7 @@ class Converter:
          for label in labels:
              fp.write("{}\n".format(label))

-         pbar = tqdm.tqdm(total=pages.items_count, disable=dataset._client_api.verbose.disable_progress_bar,
+         pbar = tqdm.tqdm(total=pages.items_count, disable=dataset._client_api.verbose.disable_progress_bar_convert_annotations,
                           file=sys.stdout, desc='Convert Annotations')
          reporter = Reporter(
              num_workers=pages.items_count,
@@ -359,7 +359,7 @@ class Converter:
          converted_annotations = [None for _ in range(pages.items_count)]
          item_id_counter = 0
          pool = ThreadPool(processes=self.concurrency)
-         pbar = tqdm.tqdm(total=pages.items_count, disable=dataset._client_api.verbose.disable_progress_bar,
+         pbar = tqdm.tqdm(total=pages.items_count, disable=dataset._client_api.verbose.disable_progress_bar_convert_annotations,
                           file=sys.stdout, desc='Convert Annotations')
          reporter = Reporter(
              num_workers=pages.items_count,
{dtlpy-1.100.5.dist-info → dtlpy-1.102.14.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dtlpy
- Version: 1.100.5
+ Version: 1.102.14
  Summary: SDK and CLI for Dataloop platform
  Home-page: https://github.com/dataloop-ai/dtlpy
  Author: Dataloop Team
{dtlpy-1.100.5.dist-info → dtlpy-1.102.14.dist-info}/RECORD CHANGED
@@ -1,5 +1,5 @@
  dtlpy/__init__.py,sha256=1Zngp5ftTgWb3r-sc8z98TYpEs6fOB_7snFhsXYQLVg,20899
- dtlpy/__version__.py,sha256=e4pBVlNGZ5kJJMpSKyUUs4mbQCOloji-ZOOFXliGh94,20
+ dtlpy/__version__.py,sha256=82fpG2J9U-IcmpwkJvq1am0FxF4AGGDMXe7Jqv7JIi4,21
  dtlpy/exceptions.py,sha256=EQCKs3pwhwZhgMByQN3D3LpWpdxwcKPEEt-bIaDwURM,2871
  dtlpy/new_instance.py,sha256=u_c6JtgqsKCr7TU24-g7_CaST9ghqamMhM4Z0Zxt50w,10121
  dtlpy/assets/__init__.py,sha256=D_hAa6NM8Zoy32sF_9b7m0b7I-BQEyBFg8-9Tg2WOeo,976
@@ -48,7 +48,7 @@ dtlpy/entities/__init__.py,sha256=Sihb652vYCyCWSQfiYTEGQW0EJsHe7BTk6-S5DOsAb0,49
  dtlpy/entities/analytic.py,sha256=5MpYDKPVsZ1MIy20Ju515RWed6P667j4TLxsan2gyNM,11925
  dtlpy/entities/annotation.py,sha256=sqgnONKbNb9gmPLJBH3mYJhFWeKjoiJ8dt57Cz7C6YA,67020
  dtlpy/entities/annotation_collection.py,sha256=CEYSBHhhDkC0VJdHsBSrA6TgdKGMcKeI3tFM40UJwS8,29838
- dtlpy/entities/app.py,sha256=LQ0cDuqeNtO7iihrTq8MBLrFbZiDVs6AFoAi4UQtUZY,6958
+ dtlpy/entities/app.py,sha256=dVd87-mP22NWvec5nqA5VjZ8Qk3aJlgUcloIAAOAPUw,6968
  dtlpy/entities/app_module.py,sha256=0UiAbBX1q8iEImi3nY7ySWZZHoRRwu0qUXmyXmgVAc4,3645
  dtlpy/entities/artifact.py,sha256=wtLtBuidOPbnba0ok40JyunCCIBGbAl4bP_ebK39Kk4,5711
  dtlpy/entities/assignment.py,sha256=Dc1QcfVf67GGcmDDi4ubESDuPkSgjXqdqjTBQ31faUM,14722
@@ -57,42 +57,42 @@ dtlpy/entities/bot.py,sha256=is3NUCnPg56HSjsHIvFcVkymValMqDV0uHRDC1Ib-ds,3819
  dtlpy/entities/codebase.py,sha256=pwRkAq2GV0wvmzshg89IAmE-0I2Wsy_-QNOu8OV8uqc,8999
  dtlpy/entities/command.py,sha256=ARu8ttk-C7_Ice7chRyTtyOtakBTF09FC04mEk73SO8,5010
  dtlpy/entities/compute.py,sha256=4FEpahPFFGHxye_fLh_p_kP6iEQ3QJK7S5hAdd6Afos,12744
- dtlpy/entities/dataset.py,sha256=tNCl7nNCx-DrZ3z96APhRdvllfQA1-9y8DpL6Ma2l0I,47516
+ dtlpy/entities/dataset.py,sha256=4_jSlAtj-vlI4egN_tg5ac_R9tS4wyK9j0-Vpf5icmY,50456
  dtlpy/entities/directory_tree.py,sha256=Rni6pLSWytR6yeUPgEdCCRfTg_cqLOdUc9uCqz9KT-Q,1186
- dtlpy/entities/dpk.py,sha256=fsJnKXyWTLI_sVkHt7j2stz_bdvXZ8ouNTiWSsiuUcA,17714
+ dtlpy/entities/dpk.py,sha256=Hqct4NbYzX7dcPxvc2QLD0QQxmGbxurwabqrHI3k6X4,17798
  dtlpy/entities/driver.py,sha256=O_QdK1EaLjQyQkmvKsmkNgmvmMb1mPjKnJGxK43KrOA,7197
  dtlpy/entities/execution.py,sha256=WBiAws-6wZnQQ3y9wyvOeexA3OjxfaRdwDu5dSFYL1g,13420
  dtlpy/entities/feature.py,sha256=9fFjD0W57anOVSAVU55ypxN_WTCsWTG03Wkc3cAAj78,3732
  dtlpy/entities/feature_set.py,sha256=niw4MkmrDbD_LWQu1X30uE6U4DCzmFhPTaYeZ6VZDB0,4443
  dtlpy/entities/filters.py,sha256=_A7rLc0yuMw1eW4gSu4-hogQzXbNUheRvFxnTQWntuo,22671
  dtlpy/entities/gis_item.py,sha256=Uk-wMBxwcHsImjz4qOjP-EyZAohbRzN43kMpCaVjCXU,3982
- dtlpy/entities/integration.py,sha256=CA5F1eQCGE_4c_Kry4nWRdeyjHctNnvexcDXg_M5HLU,5734
- dtlpy/entities/item.py,sha256=G6VVcVCudqeShWigZmNIuKD4OkvTRJ05CeXFXNe3Jk8,29691
+ dtlpy/entities/integration.py,sha256=j2rbAo__XKOSYJqX9wUYmPtuV4lnbmqEjA0WCrag92k,5876
+ dtlpy/entities/item.py,sha256=UnAZ7yLaTu2vkbD2sxysJQNn-ceC9zS3Uf304OvcC4E,31746
  dtlpy/entities/label.py,sha256=ycDYavIgKhz806plIX-64c07_TeHpDa-V7LnfFVe4Rg,3869
  dtlpy/entities/links.py,sha256=FAmEwHtsrqKet3c0UHH9u_gHgG6_OwF1-rl4xK7guME,2516
  dtlpy/entities/message.py,sha256=ApJuaKEqxATpXjNYUjGdYPu3ibQzEMo8-LtJ_4xAcPI,5865
- dtlpy/entities/model.py,sha256=UJCnb4gvVKpqzu_0U91hFC4_qkcYSoLhupR9lTnWp20,26892
+ dtlpy/entities/model.py,sha256=YwjIi3MxAZoyartTvqx_qhtDKQe6zVsQuwZbYLygMxU,26898
  dtlpy/entities/node.py,sha256=yPPYDLtNMc6vZbbf4FIffY86y7tkaTvYm42Jb7k3Ofk,39617
- dtlpy/entities/ontology.py,sha256=ok4p3sLBc_SS5hs2gZr5-gbblrveM7qSIX4z67QSKeQ,31967
+ dtlpy/entities/ontology.py,sha256=N5bsnNUfWNN49WV8lGQL8QRxbKfCZyV-rDyTlGByUrw,31987
  dtlpy/entities/organization.py,sha256=AMkx8hNIIIjnu5pYlNjckMRuKt6H3lnOAqtEynkr7wg,9893
- dtlpy/entities/package.py,sha256=EA5cB3nFBlsbxVK-QroZILjol2bYSVGqCby-mOyJJjQ,26353
+ dtlpy/entities/package.py,sha256=QSDePHlp4ik19aUE3dAUC7edh0oUUVjzSmMG867avc4,26363
  dtlpy/entities/package_defaults.py,sha256=wTD7Z7rGYjVy8AcUxTFEnkOkviiJaLVZYvduiUBKNZo,211
  dtlpy/entities/package_function.py,sha256=M42Kvw9A8b6msAkv-wRNAQg_-UC2bejniCjeKDugudc,6314
  dtlpy/entities/package_module.py,sha256=cOkIITATkzzCQpE0sdPiBUisAz8ImlPG2YGZ0K7SypA,5151
  dtlpy/entities/package_slot.py,sha256=XBwCodQe618sQm0bmx46Npo94mEk-zUV7ZX0mDRcsD8,3946
- dtlpy/entities/paged_entities.py,sha256=6y44H3FSclQvhB1KLI4zuIs317hWOhdHUynldRrUJkE,5913
- dtlpy/entities/pipeline.py,sha256=X9238WbMGfZcXdQVEtkw8twZwl0O4EZB4TxbTSEyPeI,20788
- dtlpy/entities/pipeline_execution.py,sha256=XCXlBAHFYVL2HajE71hK-bPxI4gTwZvg5SKri4BgyRA,9928
+ dtlpy/entities/paged_entities.py,sha256=grNjt2FYg4gKBlVRDkztI1BPOI4JoGeyjvmOW3BnB3k,5927
+ dtlpy/entities/pipeline.py,sha256=JtWGoCUhVszOVkBNK43fbTt446fkND4wH-Y-fN_llww,20851
+ dtlpy/entities/pipeline_execution.py,sha256=EQhW4W_G1bIPShYbJSAT--1WNQuvxVQbcQ_MCHIX0KI,9938
  dtlpy/entities/project.py,sha256=ZUx8zA3mr6N145M62R3UDPCCzO1vxfyWO6vjES-bO-g,14653
  dtlpy/entities/prompt_item.py,sha256=d4rqP961PYlJvJJDRXZPI7Z6NdwRXlx_Q0_N0xtZ_B8,19276
- dtlpy/entities/recipe.py,sha256=Q1HtYgind3bEe-vnDZWhw6H-rcIAGhkGHPRWtLIkPSE,11917
+ dtlpy/entities/recipe.py,sha256=SX0T7gw-_9Cs2FZyC_htIxQd7CwDwb2zA3SqB37vymM,11917
  dtlpy/entities/reflect_dict.py,sha256=2NaSAL-CO0T0FYRYFQlaSpbsoLT2Q18AqdHgQSLX5Y4,3273
  dtlpy/entities/resource_execution.py,sha256=1HuVV__U4jAUOtOkWlWImnM3Yts8qxMSAkMA9sBhArY,5033
- dtlpy/entities/service.py,sha256=3A_kcEUCbaS-Qx31rfNyThYK7OxUrzHiE6shT0Oxh60,33467
+ dtlpy/entities/service.py,sha256=NI4lFC6FqLw4aEGarr2rMptxe3omVfC39C9VAnYYEJA,33733
  dtlpy/entities/setting.py,sha256=uXagJHtcCR3nJYClR_AUGZjz_kx3TejPcUZ8ginHFIA,8561
- dtlpy/entities/task.py,sha256=XHiEqZYFlrDCtmw1MXsysjoBLdIzAk7coMrVk8bNIiE,19534
+ dtlpy/entities/task.py,sha256=SL1-6p4jruELkWI-5VXBMn7Imj1xJVaOfAFDa7inH64,19544
  dtlpy/entities/time_series.py,sha256=336jWNckjuSn0G29WJFetB7nBoFAKqs4VH9_IB4m4FE,4017
- dtlpy/entities/trigger.py,sha256=zh3wYUY2-zATh_7ous0Ck87Yojo9r9PAVQrkcESxoko,14266
+ dtlpy/entities/trigger.py,sha256=Spf5G3n1PsD3mDntwbAsc-DpEGDlqKgU9ec0Q0HinsQ,14286
  dtlpy/entities/user.py,sha256=hqEzwN6rl1oUTpKOV5eXvw9Z7dtpsiC4TAPSNBmkqcM,3865
  dtlpy/entities/webhook.py,sha256=6R06MgLxabvKySInGlSJmaf0AVmAMe3vKusWhqONRyU,3539
  dtlpy/entities/annotation_definitions/__init__.py,sha256=qZ77hGmCQopPSpiDHYhNWbNKC7nrn10NWNlim9dINmg,666
@@ -148,7 +148,7 @@ dtlpy/miscellaneous/list_print.py,sha256=leEg3RodgYfH5t_0JG8VuM8NiesR8sJLK_mRStt
  dtlpy/miscellaneous/zipping.py,sha256=GMdPhAeHQXeMS5ClaiKWMJWVYQLBLAaJUWxvdYrL4Ro,5337
  dtlpy/ml/__init__.py,sha256=vPkyXpc9kcWWZ_PxyPEOsjKBJdEbowLkZr8FZIb_OBM,799
  dtlpy/ml/base_feature_extractor_adapter.py,sha256=iiEGYAx0Rdn4K46H_FlKrAv3ebTXHSxNVAmio0BxhaI,1178
- dtlpy/ml/base_model_adapter.py,sha256=y7txv5JdTpVCZY6IvYXiQRSz4k3YXhHFEKoDTkcVHz0,50610
+ dtlpy/ml/base_model_adapter.py,sha256=FMM1LknEYN69yW12A1f0x6BywdxKteVE_9MuQiryDYo,50622
  dtlpy/ml/metrics.py,sha256=BG2E-1Mvjv2e2No9mIJKVmvzqBvLqytKcw3hA7wVUNc,20037
  dtlpy/ml/predictions_utils.py,sha256=He_84U14oS2Ss7T_-Zj5GDiBZwS-GjMPURUh7u7DjF8,12484
  dtlpy/ml/summary_writer.py,sha256=dehDi8zmGC1sAGyy_3cpSWGXoGQSiQd7bL_Thoo8yIs,2784
@@ -161,12 +161,12 @@ dtlpy/repositories/artifacts.py,sha256=Ke2ustTNw-1eQ0onLsWY7gL2aChjXPAX5p1uQ_EzM
  dtlpy/repositories/assignments.py,sha256=1VwJZ7ctQe1iaDDDpeYDgoj2G-TCgzolVLUEqUocd2w,25506
  dtlpy/repositories/bots.py,sha256=q1SqH01JHloljKxknhHU09psV1vQx9lPhu3g8mBBeRg,8104
  dtlpy/repositories/codebases.py,sha256=pvcZxdrq0-zWysVbdXjUOhnfcF6hJD8v5VclNZ-zhGA,24668
- dtlpy/repositories/commands.py,sha256=kXhmyBpLZNs-6vKBo4iXaommpjcGBDXs287IICUnQMw,5593
+ dtlpy/repositories/commands.py,sha256=i6gQgOmRDG8ixqKU7672H3CvGt8VLT3ihDVfri1eWWc,5610
  dtlpy/repositories/compositions.py,sha256=H417BvlQAiWr5NH2eANFke6CfEO5o7DSvapYpf7v5Hk,2150
  dtlpy/repositories/computes.py,sha256=EtfE_3JhTdNlSYDPkKXBFkq-DBl4sgQqIm50ajvFdWM,9976
- dtlpy/repositories/datasets.py,sha256=rDpJXNyxOlJwDQB-wNkM-JIqOGH10q9nujnAl6y8_xU,52077
- dtlpy/repositories/downloader.py,sha256=h5Gs_hVXIOobzdwTHgLfkJYWiwtbRn3my-QMWnWJccw,44082
- dtlpy/repositories/dpks.py,sha256=mj3QPvfzj_jZAscwIgpKUfa7fLxptc3OJQ_RrSfgYxo,17487
+ dtlpy/repositories/datasets.py,sha256=SpG86uToq-E5nVHMwHgWx6VwwwkgfYo8x5vZ0WA3Ouw,56546
+ dtlpy/repositories/downloader.py,sha256=rtgGj6jAfXxHZ1oihFoOkK4MUtapFpVMdREKzXKLnu0,44113
+ dtlpy/repositories/dpks.py,sha256=dglvaiSFBvEithhlQ0RAXwzTxoZaICONs-owx3e2nfU,17848
  dtlpy/repositories/drivers.py,sha256=fF0UuHCyBzop8pHfryex23mf0kVFAkqzNdOmwBbaWxY,10204
  dtlpy/repositories/executions.py,sha256=4UoU6bnB3kl5cMuF1eJvDecfZCaB06gKWxPfv6_g1_k,32598
  dtlpy/repositories/feature_sets.py,sha256=UowMDAl_CRefRB5oZzubnsjU_OFgiPPdQXn8q2j4Kuw,9666
@@ -174,7 +174,7 @@ dtlpy/repositories/features.py,sha256=7xA2ihEuNgZD7HBQMMGLWpsS2V_3PgieKW2YAk1OeU
  dtlpy/repositories/integrations.py,sha256=Wi-CpT2PH36GFu3znWP5Uf2CmkqWBUYyOdwvatGD_eM,11798
  dtlpy/repositories/items.py,sha256=90Z8-thLWBd49fmmnP-P6pZxhHX1k4Wv6Qfxq-Ovcz4,38092
  dtlpy/repositories/messages.py,sha256=QU0Psckg6CA_Tlw9AVxqa-Ay1fRM4n269sSIJkH9o7E,3066
- dtlpy/repositories/models.py,sha256=HGDyV0kUNeH96QgjOcjigg7KV0-NubFLLapmiCVeNik,37889
+ dtlpy/repositories/models.py,sha256=IekNMcnuKVaAVTJf2AJv6YvX5qCd9kkSl4ETPMWP4Zc,38213
  dtlpy/repositories/nodes.py,sha256=xXJm_YA0vDUn0dVvaGeq6ORM0vI3YXvfjuylvGRtkxo,3061
  dtlpy/repositories/ontologies.py,sha256=unnMhD2isR9DVE5S8Fg6fSDf1ZZ5Xemxxufx4LEUT3w,19577
  dtlpy/repositories/organizations.py,sha256=6ijUDFbsogfRul1g_vUB5AZOb41MRmV5NhNU7WLHt3A,22825
@@ -185,17 +185,17 @@ dtlpy/repositories/projects.py,sha256=tZyFLqVs-8ggTIi5echlX7XdGOJGW4LzKuXke7jkRn
  dtlpy/repositories/recipes.py,sha256=ZZDhHn9g28C99bsf0nFaIpVYn6f6Jisz9upkHEkeaYY,15843
  dtlpy/repositories/resource_executions.py,sha256=PyzsbdJxz6jf17Gx13GZmqdu6tZo3TTVv-DypnJ_sY0,5374
  dtlpy/repositories/schema.py,sha256=kTKDrbwm7BfQnBAK81LpAl9ChNFdyUweSLNazlJJhjk,3953
- dtlpy/repositories/services.py,sha256=kG2CW4xKKO5mbFv93Km-6gxq-Zq6-6GToEtQQ0R-KWA,67489
+ dtlpy/repositories/services.py,sha256=2ruoPwyznRwsNtM7YK2vSGQP9jtCHB6WitRo-Z0yB_c,68576
  dtlpy/repositories/settings.py,sha256=pvqNse0ANCdU3NSLJEzHco-PZq__OIsPSPVJveB9E4I,12296
  dtlpy/repositories/tasks.py,sha256=v09S2pYGkKx_vBG7SWigJeuMhp0GsefKo3Td7ImrWb0,49039
  dtlpy/repositories/times_series.py,sha256=m-bKFEgiZ13yQNelDjBfeXMUy_HgsPD_JAHj1GVx9fU,11420
  dtlpy/repositories/triggers.py,sha256=izdNyCN1gDc5uo7AXntso0HSMTDIzGFUp-dSEz8cn_U,21990
  dtlpy/repositories/upload_element.py,sha256=R2KWIXmkp_dMAIr81tu3Y_VRfldj0ju8__V28ombkcg,10677
- dtlpy/repositories/uploader.py,sha256=c-YPOGd3ONCHrPJKGDNe_DiFkUiIAdeWY5Yv1Neg_rM,31306
+ dtlpy/repositories/uploader.py,sha256=9tbXPuBmRJN95Ifcrr-UDT-6tiKqpTiRMnuS8qcjTag,31319
  dtlpy/repositories/webhooks.py,sha256=IIpxOJ-7KeQp1TY9aJZz-FuycSjAoYx0TDk8z86KAK8,9033
  dtlpy/services/__init__.py,sha256=VfVJy2otIrDra6i7Sepjyez2ujiE6171ChQZp-YgxsM,904
  dtlpy/services/aihttp_retry.py,sha256=tgntZsAY0dW9v08rkjX1T5BLNDdDd8svtgn7nH8DSGU,5022
- dtlpy/services/api_client.py,sha256=_LpnqLO-9eFAMQkOk5qP3wMVcdwKyOQ-iNvC0jdvATg,69294
+ dtlpy/services/api_client.py,sha256=7ctRdpLX6QvJLI-xNRXqlqfrhOiGl5EynY9nrLkuJw4,73113
  dtlpy/services/api_reference.py,sha256=cW-B3eoi9Xs3AwI87_Kr6GV_E6HPoC73aETFaGz3A-0,1515
  dtlpy/services/async_utils.py,sha256=kaYHTPw0Lg8PeJJq8whPyzrBYkzD7offs5hsKRZXJm8,3960
  dtlpy/services/calls_counter.py,sha256=gr0io5rIsO5-7Cgc8neA1vK8kUtYhgFPmDQ2jXtiZZs,1036
@@ -208,7 +208,7 @@ dtlpy/services/reporter.py,sha256=4zi9-bshKAPHG2XMOXS39cFZ0mhqNc3Qa9uaMN7CSZ8,91
  dtlpy/services/service_defaults.py,sha256=a7KoqkVmn2TXmM9gN9JRaVVtcG2b8JGIieVnaZeEaao,3860
  dtlpy/utilities/__init__.py,sha256=ncQD1O5lZ7L9n9rNRBivyqNVFDZyQcmqn-X-wyQhhIs,898
  dtlpy/utilities/base_package_runner.py,sha256=tux_XCiCoOhMPtFaQludzhj0ny6OTKhyoN1aXjPal54,8522
- dtlpy/utilities/converter.py,sha256=8mOdKiLe1ATgR1Q56-c6d716aKGRt0A3M9AelijaYN8,74816
+ dtlpy/utilities/converter.py,sha256=4CDrhmG1DZPxhZf2AfB4s7-mb3apYJ7zQ4XHuY8aPH4,74856
  dtlpy/utilities/annotations/__init__.py,sha256=Eb72MloiwDQWe8H4NptFP1RZEEhcY2Fz_w_e34tdCiE,728
  dtlpy/utilities/annotations/annotation_converters.py,sha256=KOqLVtb88GnrvuVi5x-t5vtzVN9Am98RersBl_D44SU,10796
  dtlpy/utilities/dataset_generators/__init__.py,sha256=pA7UqhTh51gC407FyNa_WG8fUFnd__4tmEUTkNBlcLs,65
@@ -223,19 +223,19 @@ dtlpy/utilities/reports/report.py,sha256=3nEsNnIWmdPEsd21nN8vMMgaZVcPKn9iawKTTeO
  dtlpy/utilities/videos/__init__.py,sha256=SV3w51vfPuGBxaMeNemx6qEMHw_C4lLpWNGXMvdsKSY,734
  dtlpy/utilities/videos/video_player.py,sha256=LCxg0EZ_DeuwcT7U_r7MRC6Q19s0xdFb7x5Gk39PRms,24072
  dtlpy/utilities/videos/videos.py,sha256=Dj916B4TQRIhI7HZVevl3foFrCsPp0eeWwvGbgX3-_A,21875
- dtlpy-1.100.5.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
- dtlpy-1.100.5.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
- dtlpy-1.100.5.data/scripts/dlp.py,sha256=tEokRaDINISXnq8yNx_CBw1qM5uwjYiZoJOYGqWB3RU,4267
+ dtlpy-1.102.14.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
+ dtlpy-1.102.14.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
+ dtlpy-1.102.14.data/scripts/dlp.py,sha256=tEokRaDINISXnq8yNx_CBw1qM5uwjYiZoJOYGqWB3RU,4267
  tests/assets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tests/assets/models_flow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tests/assets/models_flow/failedmain.py,sha256=n8F4eu_u7JPrJ1zedbJPvv9e3lHb3ihoErqrBIcseEc,1847
- tests/assets/models_flow/main.py,sha256=xotAjdHpFnIic3Wb-4f7GSg2igtuXZjvRPiYdCTawhA,2064
+ tests/assets/models_flow/main.py,sha256=vnDKyVZaae2RFpvwS22Hzi6Dt2LJerH4yQrmKtaT8_g,2123
  tests/assets/models_flow/main_model.py,sha256=Hl_tv7Q6KaRL3yLkpUoLMRqu5-ab1QsUYPL6RPEoamw,2042
  tests/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- tests/features/environment.py,sha256=V23cUx_p4VpNk9kc2I0BDZJHO_xcJBFJq8m3JlYCooc,16736
- dtlpy-1.100.5.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
- dtlpy-1.100.5.dist-info/METADATA,sha256=vOiJBLBtuWXJGkaq6eI81yRcUyFEAiMM9O6EvxoWtsw,3019
- dtlpy-1.100.5.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
- dtlpy-1.100.5.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
- dtlpy-1.100.5.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
- dtlpy-1.100.5.dist-info/RECORD,,
+ tests/features/environment.py,sha256=oAO7H7j7Y7czW0t25Gv1KwI2-ofqhZVkbCw9LbZCp4Y,17506
+ dtlpy-1.102.14.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+ dtlpy-1.102.14.dist-info/METADATA,sha256=8SA6BCGb20bC3aZZwWdE-vrOjyOEEvA7ecIX1aE6fJo,3020
+ dtlpy-1.102.14.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+ dtlpy-1.102.14.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
+ dtlpy-1.102.14.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
+ dtlpy-1.102.14.dist-info/RECORD,,
tests/assets/models_flow/main.py CHANGED
@@ -1,3 +1,5 @@
+ import time
+
  import dtlpy as dl
  import logging
  import os
@@ -26,10 +28,12 @@ class ModelAdapter(dl.BaseModelAdapter):

      def train(self, data_path, output_path, **kwargs):
          logger.info("model training")
+         time.sleep(60)
          print(self.model_entity.id)

      def predict(self, batch, **kwargs):
          logger.info("model prediction")
+         time.sleep(30)
          batch_annotations = list()

          for img in batch:
tests/features/environment.py CHANGED
@@ -13,6 +13,7 @@ from behave.formatter.base import StreamOpener
  import sys

  import dtlpy as dl
+ import shutil

  try:
      # for local import
@@ -276,6 +277,11 @@ def after_tag(context, tag):
              use_fixture(reset_setenv, context)
          except Exception:
              logging.exception('Failed to reset env')
+     elif tag == 'restore_json_file':
+         try:
+             use_fixture(restore_json_file, context)
+         except Exception:
+             logging.exception('Failed to restore json file')
      elif tag == 'frozen_dataset':
          pass
      elif 'testrail-C' in tag:
@@ -381,7 +387,8 @@ def delete_pipeline(context):
      while context.to_delete_pipelines_ids:
          pipeline_id = context.to_delete_pipelines_ids.pop(0)
          try:
-             filters = context.dl.Filters(resource=context.dl.FiltersResource.EXECUTION, field='latestStatus.status', values=['created', 'in-progress'], operator='in')
+             filters = context.dl.Filters(resource=context.dl.FiltersResource.EXECUTION, field='latestStatus.status',
+                                          values=['created', 'in-progress'], operator='in')
              filters.add(field='pipeline.id', values=pipeline_id)
              executions = context.dl.executions.list(filters=filters)
              for execution in executions.items:
@@ -494,4 +501,15 @@ def models_delete(context):
          except:
              all_deleted = False
              logging.exception('Failed deleting model: {}'.format(model.id))
-     assert all_deleted
+     assert all_deleted
+
+
+ def restore_json_file(context):
+     if not hasattr(context, 'backup_path') or not hasattr(context, 'original_path'):
+         assert False, 'Please make sure to set the original_path and backup_path in the context'
+     # Restore the file from the backup
+     if os.path.exists(context.backup_path):
+         shutil.copy(context.backup_path, context.original_path)
+         os.remove(context.backup_path)  # Clean up the backup
+     else:
+         raise FileNotFoundError(f"Backup file not found for {context.original_path}")
The remaining files (31-37) were renamed for the new version and contain no content changes.