dtlpy 1.114.17__py3-none-any.whl → 1.115.44__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34) hide show
  1. dtlpy/__init__.py +1 -1
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/entities/__init__.py +1 -1
  4. dtlpy/entities/analytic.py +42 -6
  5. dtlpy/entities/codebase.py +1 -5
  6. dtlpy/entities/compute.py +12 -5
  7. dtlpy/entities/dataset.py +19 -5
  8. dtlpy/entities/driver.py +14 -2
  9. dtlpy/entities/filters.py +156 -3
  10. dtlpy/entities/item.py +9 -3
  11. dtlpy/entities/prompt_item.py +7 -1
  12. dtlpy/entities/service.py +5 -0
  13. dtlpy/ml/base_model_adapter.py +407 -263
  14. dtlpy/repositories/commands.py +1 -7
  15. dtlpy/repositories/computes.py +17 -13
  16. dtlpy/repositories/datasets.py +287 -74
  17. dtlpy/repositories/downloader.py +23 -3
  18. dtlpy/repositories/drivers.py +12 -0
  19. dtlpy/repositories/executions.py +1 -3
  20. dtlpy/repositories/features.py +31 -14
  21. dtlpy/repositories/items.py +5 -2
  22. dtlpy/repositories/models.py +16 -4
  23. dtlpy/repositories/uploader.py +22 -12
  24. dtlpy/services/api_client.py +6 -3
  25. dtlpy/services/reporter.py +1 -1
  26. {dtlpy-1.114.17.dist-info → dtlpy-1.115.44.dist-info}/METADATA +15 -12
  27. {dtlpy-1.114.17.dist-info → dtlpy-1.115.44.dist-info}/RECORD +34 -34
  28. {dtlpy-1.114.17.data → dtlpy-1.115.44.data}/scripts/dlp +0 -0
  29. {dtlpy-1.114.17.data → dtlpy-1.115.44.data}/scripts/dlp.bat +0 -0
  30. {dtlpy-1.114.17.data → dtlpy-1.115.44.data}/scripts/dlp.py +0 -0
  31. {dtlpy-1.114.17.dist-info → dtlpy-1.115.44.dist-info}/WHEEL +0 -0
  32. {dtlpy-1.114.17.dist-info → dtlpy-1.115.44.dist-info}/entry_points.txt +0 -0
  33. {dtlpy-1.114.17.dist-info → dtlpy-1.115.44.dist-info}/licenses/LICENSE +0 -0
  34. {dtlpy-1.114.17.dist-info → dtlpy-1.115.44.dist-info}/top_level.txt +0 -0
@@ -261,6 +261,7 @@ class Drivers:
261
261
  name: str,
262
262
  elastic_index: str,
263
263
  elastic_index_path: str,
264
+ integration: entities.Integration = None,
264
265
  project_id: str = None,
265
266
  allow_external_delete: bool = False
266
267
  ):
@@ -271,6 +272,7 @@ class Drivers:
271
272
 
272
273
  :param str name: The driver name
273
274
  :param str elastic_index: The elastic index for PowerScale NFS driver
275
+ :param Integration integration: Optional. The S3 integration to use. If not provided, integration id will be 'system'
274
276
  :param str project_id: Optional project ID. If not provided, uses the current project
275
277
  :param bool allow_external_delete: True to allow deleting files from external storage when files are deleted in your Dataloop storage
276
278
  :param str elastic_index_path: The elastic index path for PowerScale NFS driver
@@ -289,7 +291,17 @@ class Drivers:
289
291
  )
290
292
  """
291
293
 
294
+ integration_id = 'system'
295
+ integration_type = None
296
+ if integration is not None:
297
+ if not integration.type == entities.IntegrationType.S3:
298
+ raise ValueError("Integration type must be S3 for PowerScale NFS driver")
299
+ integration_id = integration.id
300
+ integration_type = integration.type
301
+
292
302
  payload = {
303
+ "integrationId": integration_id,
304
+ "integrationType": integration_type,
293
305
  "name": name,
294
306
  "metadata": {"system": {"projectId": self.project.id if project_id is None else project_id}},
295
307
  "type": entities.ExternalStorage.POWERSCALE_NFS,
@@ -660,9 +660,7 @@ class Executions:
660
660
  f"execution wait() got timeout. id: {execution.id!r}, status: {execution.latest_status}")
661
661
  sleep_time = np.min([timeout - elapsed, backoff_factor * (2 ** num_tries), MAX_SLEEP_TIME])
662
662
  num_tries += 1
663
- logger.debug("Execution {!r} is running for {:.2f}[s] and now Going to sleep {:.2f}[s]".format(execution.id,
664
- elapsed,
665
- sleep_time))
663
+ logger.debug(f"Execution {execution.id} is running for {elapsed:.2f}[s]. Sleeping for {sleep_time:.2f}[s]")
666
664
  time.sleep(sleep_time)
667
665
 
668
666
  return execution
@@ -153,10 +153,10 @@ class Features:
153
153
  """
154
154
  Create a new Feature vector
155
155
 
156
- :param immutable value: actual vector - immutable (list of floats [1,2,3])
156
+ :param immutable value: actual vector - immutable (list of floats [1,2,3]) or list of lists of floats
157
157
  :param str project_id: the id of the project where feature will be created
158
158
  :param str feature_set_id: ref to a featureSet this vector is a part of
159
- :param entity: the entity the featureVector is linked to (item, annotation, etc)
159
+ :param entity: the entity the featureVector is linked to (item, annotation, etc) or list of entities
160
160
  :param str version: version of the featureSet generator
161
161
  :param str parent_id: optional: parent FeatureSet id - used when FeatureVector is a subFeature
162
162
  :param str org_id: the id of the org where featureVector will be created
@@ -175,19 +175,33 @@ class Features:
175
175
  raise ValueError(
176
176
  'Missing feature_set_id. Must insert the variable or create from context, e.g. feature_set.features.create()')
177
177
  feature_set_id = self._feature_set.id
178
-
179
- payload = {'project': project_id,
180
- 'entityId': entity.id,
181
- 'value': value,
182
- 'featureSetId': feature_set_id,
183
- 'datasetId': entity.dataset.id}
184
-
178
+
179
+ # Additional payload info
180
+ additional_payload = {}
185
181
  if version is not None:
186
- payload['version'] = version
182
+ additional_payload['version'] = version
187
183
  if parent_id is not None:
188
- payload['parentId'] = parent_id
184
+ additional_payload['parentId'] = parent_id
189
185
  if org_id is not None:
190
- payload['org'] = org_id
186
+ additional_payload['org'] = org_id
187
+ additional_payload['project'] = project_id
188
+ additional_payload['featureSetId'] = feature_set_id
189
+
190
+ if not isinstance(entity, list):
191
+ entity = [entity]
192
+ value = [value]
193
+
194
+ if len(value) != len(entity):
195
+ raise ValueError('The number of vectors must be equal to the number of entities')
196
+
197
+ payload = []
198
+ for (single_entity, single_value) in zip(entity, value):
199
+ entry = {'entityId': single_entity.id,
200
+ 'value': single_value,
201
+ 'datasetId': single_entity.dataset.id
202
+ }
203
+ entry.update(additional_payload)
204
+ payload.append(entry)
191
205
 
192
206
  success, response = self._client_api.gen_request(req_type="post",
193
207
  json_req=payload,
@@ -197,9 +211,12 @@ class Features:
197
211
  if not success:
198
212
  raise exceptions.PlatformException(response)
199
213
 
214
+ features = [entities.Feature.from_json(client_api=self._client_api,
215
+ _json=feature) for feature in response.json()]
200
216
  # return entity
201
- return entities.Feature.from_json(client_api=self._client_api,
202
- _json=response.json()[0])
217
+ if len(features) == 1:
218
+ return features[0]
219
+ return features
203
220
 
204
221
  @_api_reference.add(path='/features/vectors/{id}', method='delete')
205
222
  def delete(self, feature_id: str):
@@ -524,7 +524,8 @@ class Items:
524
524
  export_version=entities.ExportVersion.V1,
525
525
  dataset_lock: bool = False,
526
526
  lock_timeout_sec: int = None,
527
- export_summary: bool = False,
527
+ export_summary: bool = False,
528
+ raise_on_error: bool = False,
528
529
  ):
529
530
  """
530
531
  Download dataset items by filters.
@@ -557,6 +558,7 @@ class Items:
557
558
  :param bool filter_output_annotations: default - False, given an export by filter - determine if to filter out annotations
558
559
  :param float alpha: opacity value [0 1], default 1
559
560
  :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
561
+ :param bool raise_on_error: raise an exception if an error occurs
560
562
  :return: generator of local_path per each downloaded item
561
563
  :rtype: generator or single item
562
564
 
@@ -596,7 +598,8 @@ class Items:
596
598
  export_version=export_version,
597
599
  dataset_lock=dataset_lock,
598
600
  lock_timeout_sec=lock_timeout_sec,
599
- export_summary=export_summary
601
+ export_summary=export_summary,
602
+ raise_on_error=raise_on_error
600
603
  )
601
604
 
602
605
  def upload(
@@ -781,13 +781,14 @@ class Models:
781
781
  client_api=self._client_api,
782
782
  project=self._project)
783
783
 
784
- def predict(self, model, item_ids, dataset_id=None):
784
+ def predict(self, model, item_ids=None, dataset_id=None,filters=None):
785
785
  """
786
786
  Run model prediction with items
787
787
 
788
788
  :param model: dl.Model entity to run the prediction.
789
789
  :param item_ids: a list of item id to run the prediction.
790
790
  :param dataset_id: a dataset id to run the prediction.
791
+ :param filters: dtlpy.entities.filters.Filters object to filter the items to run the prediction on (requires dataset_id).
791
792
  :return:
792
793
  """
793
794
  if len(model.metadata['system'].get('deploy', {}).get('services', [])) == 0:
@@ -795,30 +796,36 @@ class Models:
795
796
  raise ValueError("Model doesnt have any associated services. Need to deploy before predicting")
796
797
  if item_ids is None and dataset_id is None:
797
798
  raise ValueError("Need to provide either item_ids or dataset_id")
799
+ if filters is not None and dataset_id is None:
800
+ raise ValueError("If filters are provided, dataset_id is mandatory.")
798
801
  payload_input = {}
799
802
  if item_ids is not None:
800
803
  payload_input['itemIds'] = item_ids
801
804
  if dataset_id is not None:
802
805
  payload_input['datasetId'] = dataset_id
806
+ if filters is not None:
807
+ payload_input['datasetQuery'] = filters.prepare()['filter']
803
808
  payload = {'input': payload_input,
804
809
  'config': {'serviceId': model.metadata['system']['deploy']['services'][0]}}
805
-
810
+ logger.debug(f"generate post request to predict with payload {payload}")
806
811
  success, response = self._client_api.gen_request(req_type="post",
807
812
  path=f"/ml/models/{model.id}/predict",
808
813
  json_req=payload)
809
814
  if not success:
815
+ logger.error(f"failed to make API request /ml/models/{model.id}/predict with payload {payload} response {response}")
810
816
  raise exceptions.PlatformException(response)
811
817
  return entities.Execution.from_json(_json=response.json(),
812
818
  client_api=self._client_api,
813
819
  project=self._project)
814
820
 
815
- def embed(self, model, item_ids=None, dataset_id=None):
821
+ def embed(self, model, item_ids=None, dataset_id=None, filters=None):
816
822
  """
817
823
  Run model embed with items
818
824
 
819
825
  :param model: dl.Model entity to run the prediction.
820
826
  :param item_ids: a list of item id to run the embed.
821
827
  :param dataset_id: a dataset id to run the embed.
828
+ :param filters: dtlpy.entities.filters.Filters object to filter the items to run the embed on (requires dataset_id).
822
829
  :return: Execution
823
830
  :rtype: dtlpy.entities.execution.Execution
824
831
  """
@@ -827,18 +834,23 @@ class Models:
827
834
  raise ValueError("Model doesnt have any associated services. Need to deploy before predicting")
828
835
  if item_ids is None and dataset_id is None:
829
836
  raise ValueError("Need to provide either item_ids or dataset_id")
837
+ if filters is not None and dataset_id is None:
838
+ raise ValueError("If filters are provided, dataset_id is mandatory.")
830
839
  payload_input = {}
831
840
  if item_ids is not None:
832
841
  payload_input['itemIds'] = item_ids
833
842
  if dataset_id is not None:
834
843
  payload_input['datasetId'] = dataset_id
844
+ if filters is not None:
845
+ payload_input['datasetQuery'] = filters.prepare()['filter']
835
846
  payload = {'input': payload_input,
836
847
  'config': {'serviceId': model.metadata['system']['deploy']['services'][0]}}
837
-
848
+ logger.debug(f"generate post request to embed with payload {payload}")
838
849
  success, response = self._client_api.gen_request(req_type="post",
839
850
  path=f"/ml/models/{model.id}/embed",
840
851
  json_req=payload)
841
852
  if not success:
853
+ logger.error(f"failed to make API request /ml/models/{model.id}/embed with payload {payload} response {response}")
842
854
  raise exceptions.PlatformException(response)
843
855
  return entities.Execution.from_json(_json=response.json(),
844
856
  client_api=self._client_api,
@@ -90,6 +90,8 @@ class Uploader:
90
90
  if isinstance(local_path, pandas.DataFrame):
91
91
  futures = self._build_elements_from_df(local_path)
92
92
  else:
93
+ start_time = time.time()
94
+ logger.debug(f"Building elements from inputs started: start time: {start_time}")
93
95
  futures = self._build_elements_from_inputs(local_path=local_path,
94
96
  local_annotations_path=local_annotations_path,
95
97
  # upload options
@@ -99,6 +101,7 @@ class Uploader:
99
101
  item_metadata=item_metadata,
100
102
  export_version=export_version,
101
103
  item_description=item_description)
104
+ logger.debug(f"Building elements from inputs completed: time taken: {time.time() - start_time}")
102
105
  num_files = len(futures)
103
106
  while futures:
104
107
  futures.popleft().result()
@@ -114,14 +117,22 @@ class Uploader:
114
117
  # log error
115
118
  errors_count = self.reporter.failure_count
116
119
  if errors_count > 0:
120
+ error_text = ""
117
121
  log_filepath = self.reporter.generate_log_files()
122
+ # Get up to 5 error examples for the exception message
123
+ if self.reporter._errors:
124
+ error_examples = list(self.reporter._errors.values())[:5]
125
+ error_text = " | ".join(error_examples)
126
+ error_message = f"Errors in {errors_count} files. Errors: {error_text}"
118
127
  if log_filepath is not None:
119
- logger.warning("Errors in {n_error} files. See {log_filepath} for full log".format(
120
- n_error=errors_count, log_filepath=log_filepath))
128
+ error_message += f", see {log_filepath} for full log"
121
129
  if raise_on_error is True:
122
- raise PlatformException(error="400",
123
- message=f"Errors in {errors_count} files. See above trace for more information")
124
-
130
+ raise PlatformException(
131
+ error="400", message=error_message
132
+ )
133
+ else:
134
+ logger.warning(error_message)
135
+
125
136
  if return_as_list is True:
126
137
  # return list of items
127
138
  return list(self.reporter.output)
@@ -217,12 +228,6 @@ class Uploader:
217
228
  if remote_name is None:
218
229
  remote_name_list = [None] * len(local_path_list)
219
230
 
220
- try:
221
- driver_path = self.items_repository.dataset.project.drivers.get(
222
- driver_id=self.items_repository.dataset.driver).path
223
- except Exception:
224
- driver_path = None
225
-
226
231
  futures = deque()
227
232
  total_size = 0
228
233
  for upload_item_element, remote_name, upload_annotations_element in zip(local_path_list,
@@ -264,7 +269,7 @@ class Uploader:
264
269
  'root': None,
265
270
  'export_version': export_version,
266
271
  'item_description': item_description,
267
- 'driver_path': driver_path
272
+ 'driver_path': None
268
273
  }
269
274
  if isinstance(upload_item_element, str):
270
275
  with_head_folder = True
@@ -290,6 +295,11 @@ class Uploader:
290
295
  upload_elem = upload_element.FileUploadElement(all_upload_elements=all_upload_elements)
291
296
 
292
297
  elif upload_item_element.startswith('external://'):
298
+ try:
299
+ driver_path = repositories.Drivers.get(driver_id=self.items_repository.dataset.driver).path
300
+ all_upload_elements['driver_path'] = driver_path
301
+ except Exception:
302
+ logger.error("Attempting to upload external item without driver path. This may cause issues.")
293
303
  upload_elem = upload_element.ExternalItemUploadElement(all_upload_elements=all_upload_elements)
294
304
 
295
305
  elif self.is_url(upload_item_element):
@@ -144,7 +144,7 @@ class Verbose:
144
144
  self._disable_progress_bar = dictionary.get('disable_progress_bar', self.__DEFAULT_DISABLE_PROGRESS_BAR)
145
145
  self._print_all_responses = dictionary.get('print_all_responses', self.__DEFAULT_PRINT_ALL_RESPONSES)
146
146
  self._print_error_logs = dictionary.get('print_error_logs', self.__PRINT_ERROR_LOGS)
147
- progress_bar_settings = dictionary.get('progress_bar_setting', None)
147
+ progress_bar_settings = dictionary.get('progress_bar_setting', None)
148
148
  if progress_bar_settings is None:
149
149
  self._progress_bar_settings = self.__DEFAULT_PROGRESS_BAR_SETTINGS
150
150
  else:
@@ -430,6 +430,7 @@ class Attributes2:
430
430
  os.environ["USE_ATTRIBUTE_2"] = json.dumps(val)
431
431
  self.to_cookie()
432
432
 
433
+
433
434
  class Decorators:
434
435
  @staticmethod
435
436
  def token_expired_decorator(method):
@@ -545,6 +546,8 @@ class ApiClient:
545
546
  self.event_tracker = Events(client_api=self)
546
547
  self.event_tracker.daemon = True
547
548
  self.event_tracker.start()
549
+ self.upload_session_timeout = int(os.environ.get('UPLOAD_SESSION_TIMEOUT', 0))
550
+ self.upload_chunk_timeout = int(os.environ.get('UPLOAD_CHUNK_TIMEOUT', 2 * 60))
548
551
 
549
552
  @property
550
553
  def event_loop(self):
@@ -1208,7 +1211,7 @@ class ApiClient:
1208
1211
  def callback(bytes_read):
1209
1212
  pass
1210
1213
 
1211
- timeout = aiohttp.ClientTimeout(total=0)
1214
+ timeout = aiohttp.ClientTimeout(total=self.upload_session_timeout)
1212
1215
  async with aiohttp.ClientSession(headers=headers, timeout=timeout) as session:
1213
1216
  try:
1214
1217
  form = aiohttp.FormData({})
@@ -1221,7 +1224,7 @@ class ApiClient:
1221
1224
  form.add_field('file', AsyncUploadStream(buffer=to_upload,
1222
1225
  callback=callback,
1223
1226
  name=uploaded_filename,
1224
- chunk_timeout=2 * 60))
1227
+ chunk_timeout=self.upload_chunk_timeout))
1225
1228
  url = '{}?mode={}'.format(self.base_gate_url + remote_url, mode)
1226
1229
 
1227
1230
  # use SSL context
@@ -214,7 +214,7 @@ class Reporter:
214
214
  os.mkdir(reports_dir)
215
215
  log_filepath = os.path.join(reports_dir,
216
216
  "log_{}_{}.json".format(self._resource,
217
- datetime.datetime.now(datetime.timezone.utc).strftime("%Y%m%d_%H%M%S")))
217
+ datetime.datetime.now(datetime.timezone.utc).strftime("%Y%m%d_%H%M%S%f"))) # Added %f to the format to include microseconds
218
218
  errors_json = dict()
219
219
  if self.cache_mode == 'diskcache':
220
220
  err_cache = self._reports['errors']
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dtlpy
3
- Version: 1.114.17
3
+ Version: 1.115.44
4
4
  Summary: SDK and CLI for Dataloop platform
5
5
  Home-page: https://github.com/dataloop-ai/dtlpy
6
6
  Author: Dataloop Team
@@ -42,6 +42,7 @@ Requires-Dist: redis>=3.5
42
42
  Requires-Dist: inquirer
43
43
  Requires-Dist: dtlpymetrics
44
44
  Requires-Dist: dataclasses
45
+ Requires-Dist: bson
45
46
  Dynamic: author
46
47
  Dynamic: author-email
47
48
  Dynamic: classifier
@@ -64,7 +65,7 @@ Dynamic: summary
64
65
  [![License](https://img.shields.io/github/license/dataloop-ai/dtlpy.svg)](https://github.com/dataloop-ai/dtlpy/blob/master/LICENSE)
65
66
  [![Downloads](https://static.pepy.tech/personalized-badge/dtlpy?period=total&units=international_system&left_color=grey&right_color=green&left_text=Downloads)](https://pepy.tech/project/dtlpy)
66
67
 
67
- 📚 [Platform Documentation](https://dataloop.ai/docs) | 📖 [SDK Documentation](https://console.dataloop.ai/sdk-docs/latest) | [Developer docs](https://developers.dataloop.ai/)
68
+ 📚 [Platform Documentation](https://dataloop.ai/docs) | 📖 [SDK Documentation](https://sdk-docs.dataloop.ai/en/latest/) | [Developer docs](https://developers.dataloop.ai/)
68
69
 
69
70
  An open-source SDK and CLI toolkit to interact seamlessly with the [Dataloop.ai](https://dataloop.ai/) platform, providing powerful data management, annotation capabilities, and workflow automation.
70
71
 
@@ -72,15 +73,17 @@ An open-source SDK and CLI toolkit to interact seamlessly with the [Dataloop.ai]
72
73
 
73
74
  ## **Table of Contents**
74
75
 
75
- - [Overview](#overview)
76
- - [Installation](#installation)
77
- - [Usage](#usage)
78
- - [SDK Usage](#sdk-usage)
79
- - [CLI Usage](#cli-usage)
80
- - [Python Version Support](#python-version-support)
81
- - [Development](#development)
82
- - [Resources](#resources)
83
- - [Contribution Guidelines](#contribution-guidelines)
76
+ - [**DTLPY – SDK and CLI for Dataloop.ai**](#dtlpy--sdk-and-cli-for-dataloopai)
77
+ - [**Table of Contents**](#table-of-contents)
78
+ - [**Overview**](#overview)
79
+ - [**Installation**](#installation)
80
+ - [**Usage**](#usage)
81
+ - [**SDK Usage**](#sdk-usage)
82
+ - [**CLI Usage**](#cli-usage)
83
+ - [**Python Version Support**](#python-version-support)
84
+ - [**Development**](#development)
85
+ - [**Resources**](#resources)
86
+ - [**Contribution Guidelines**](#contribution-guidelines)
84
87
 
85
88
  ---
86
89
 
@@ -164,7 +167,7 @@ pip install -r requirements.txt
164
167
  ## **Resources**
165
168
 
166
169
  - [Dataloop Platform](https://console.dataloop.ai)
167
- - [Full SDK Documentation](https://console.dataloop.ai/sdk-docs/latest)
170
+ - [Full SDK Documentation](https://sdk-docs.dataloop.ai/en/latest/)
168
171
  - [Platform Documentation](https://dataloop.ai/docs)
169
172
  - [SDK Examples and Tutorials](https://github.com/dataloop-ai/dtlpy-documentation)
170
173
  - [Developer docs](https://developers.dataloop.ai/)
@@ -1,5 +1,5 @@
1
- dtlpy/__init__.py,sha256=Kpyd5B6_bVW7O2l5EPnFxXJzHiobRvFbIUDeYT7kvh0,21054
2
- dtlpy/__version__.py,sha256=gcXc_lfZDPnkaeitqSccSQsJ3S5zAuZvACtFIRwuZ1s,22
1
+ dtlpy/__init__.py,sha256=kwOohJVqO34IbRMX2PrFD3C2HO9Jag7ydhAbFeAfTWQ,21084
2
+ dtlpy/__version__.py,sha256=LZECPLBGAM1qCAHhMALaViECtFy_6heC_ndQPg895SE,22
3
3
  dtlpy/exceptions.py,sha256=3-QaX1marJ1oCNNnEgnvqPlhHq8mVKjufI157pzxGZU,2996
4
4
  dtlpy/new_instance.py,sha256=6M2jZrQ6E9zfef4HOthiC9jzpQsRaLrOLu0IKw0yg0I,10234
5
5
  dtlpy/assets/__init__.py,sha256=AzcOoxymAF5O2ujLGtBIXWcA_AZx20Z0BzKLQsQWw94,1002
@@ -44,8 +44,8 @@ dtlpy/dlp/dlp,sha256=R99eKZP_pZ7egsYawtuw2lRL_6b0P7spq7JpGAY3H7Y,11
44
44
  dtlpy/dlp/dlp.bat,sha256=2V2sDEhvV2Tm7nMoyFdIxo5ODq-msgt9LJesLoPVmX0,38
45
45
  dtlpy/dlp/dlp.py,sha256=XnUyp3q0ACldaq0tPpG2UwDsk3yQNaE_nJ3QgJjHgnc,4532
46
46
  dtlpy/dlp/parser.py,sha256=2tqkCNjRY-q8Aaua7jmyXY53W9ItU5IEaZ_Pqma_JBc,31202
47
- dtlpy/entities/__init__.py,sha256=IriVIgxVS5WIqeIktdkoxUuxzFTUZhvz6o5QLw770sg,5083
48
- dtlpy/entities/analytic.py,sha256=WLHiBqP1wGa1xhzRPk4W3yCjdGflRM7aVwJvU7qAhso,12236
47
+ dtlpy/entities/__init__.py,sha256=wPZZSVUSYeSJH_nmvZOgQLaPA7AaDYut9iSxHQmMXqo,5101
48
+ dtlpy/entities/analytic.py,sha256=-koK0WQF0qSicMgektaz1nzBymC_SvWHn20xmrTHm8w,14086
49
49
  dtlpy/entities/annotation.py,sha256=CfNlbCNoCbsjorPhdBs5NDrEIB5qeO863lrmmfa_K64,68540
50
50
  dtlpy/entities/annotation_collection.py,sha256=mNEvztCL3Yr0vj0cwfdGb84lHDUFW0E9nRomDheHyU4,30537
51
51
  dtlpy/entities/app.py,sha256=vkUr08z8GnC-5vhfK9ERhXd1I5Jf6KIJU4gmJ6EUdzU,7216
@@ -54,21 +54,21 @@ dtlpy/entities/artifact.py,sha256=s23DzZpVc6QMAGvNRAuN4K3U2nKuSrY8gVD49XdyUhw,58
54
54
  dtlpy/entities/assignment.py,sha256=OHNnRaXt07xagvkJhF0kAdllkuEBywdrVdAn7tvR4Yg,15121
55
55
  dtlpy/entities/base_entity.py,sha256=zGcfjcs_Cm8FyRwZjmQPQK0-Esnrz5H3yydqFTMBa2k,6732
56
56
  dtlpy/entities/bot.py,sha256=dfuw0CkGzcwjb1Ov0KpIs0mIW5d6piHHzI0ZHjL7Vek,3932
57
- dtlpy/entities/codebase.py,sha256=fGt8KQVc1nASxsDXdIyQ7vCtfZNbxAdZvTy6zXdXpso,9295
57
+ dtlpy/entities/codebase.py,sha256=d5ZzA3N7npMQsIAG1meLAsvBtkLlPlOR7PZSBeAwqNQ,9156
58
58
  dtlpy/entities/collection.py,sha256=LqVLG6-5vZmno7FtXveBW5FcrtpSN96hXFYfA94Hp34,1198
59
59
  dtlpy/entities/command.py,sha256=dQJuqo20_qmcUpJAJkRnM5l4bNFNlaxviCo_rIaVV6U,5470
60
- dtlpy/entities/compute.py,sha256=hB8DSMNNp3l4E9xF1I7tSeapqOMGOIitwKK_6q_5-Vg,15026
61
- dtlpy/entities/dataset.py,sha256=J3BO4vE_BaVcTz0GMM_yilkaqoeV-X5NSFT77APeU0o,55123
60
+ dtlpy/entities/compute.py,sha256=Wmc5bGhXCSMdaMypUA2_SmsEv_8PaWlAWxLZTPTUz0A,15452
61
+ dtlpy/entities/dataset.py,sha256=yCnK6Q5j0fowQ6R1SjR2r94AOsHIMxhOFCfPSDr9218,55932
62
62
  dtlpy/entities/directory_tree.py,sha256=j0-kIa--HBaAZKE14b-SoI3JaZkmYnvtpkBt4vSo8_Q,1230
63
63
  dtlpy/entities/dpk.py,sha256=fnS0qV6wXCmGrc5HJJM0NxfpT_KDwMMpXzhCWweYjgk,18393
64
- dtlpy/entities/driver.py,sha256=j6vlQYnKFuZ6yv5NeW7ufBV5jG23FGRHLy9pPu41MrU,7458
64
+ dtlpy/entities/driver.py,sha256=fA-XQQk6LKUFOfMNmGlZsfVI4yqoMWUYtBBRTTFGQR4,7955
65
65
  dtlpy/entities/execution.py,sha256=WavoDhkDo6c7P8TubxHoEhTbITB4ILc9kbLvRFGVDbc,14070
66
66
  dtlpy/entities/feature.py,sha256=kRiPN8v9v6cL-mHGlQK5l9YsIMTn8HdVEkoZMKKX0Ds,3856
67
67
  dtlpy/entities/feature_set.py,sha256=yp0SJTZ3X81fYpB96TLTEWCz0_tjEwBT1T9gXPBU_eQ,4588
68
- dtlpy/entities/filters.py,sha256=I3nRyUVHFe8xgrEsKpO9EI4BjFZB7Xlw3rGz8nU3edg,23321
68
+ dtlpy/entities/filters.py,sha256=12LVboZR1-97L8oV0mrMqKEjDzoM12S4yWTp_2I0i88,30197
69
69
  dtlpy/entities/gis_item.py,sha256=DdQHePdOBKPBah6Ff0ceW07b8ytODqVehjdJdbu2YxA,4089
70
70
  dtlpy/entities/integration.py,sha256=V5mIHMWUDT8KmQiZ7l-KGKp96czZqUan4q3Hj1WgD1k,6185
71
- dtlpy/entities/item.py,sha256=LP3xK1EySf9b7FiucqSkhLsz27AwVqZqunRYjSJ2TCM,35544
71
+ dtlpy/entities/item.py,sha256=Qyx022RikwGCe7rWvgRD8XUI-iyZmI3pLGVH5khprZ4,35843
72
72
  dtlpy/entities/label.py,sha256=y7kiq-FmsEc3LrXEXUPf53EZ3ijhhobAmLeJ6UUvb2c,3992
73
73
  dtlpy/entities/links.py,sha256=_WIIbRQJPcpKU_PgZknncJ_ktQINaHa2EpUxREDsXm8,2601
74
74
  dtlpy/entities/message.py,sha256=rgZgm0H73eogzMG7T4aX-tqpGMbwk2uUo-nUyC-XPyk,6040
@@ -85,11 +85,11 @@ dtlpy/entities/paged_entities.py,sha256=DeQUMqgMfeUabQR6X-nqmgQLtJ55Mi1ubBjrO96m
85
85
  dtlpy/entities/pipeline.py,sha256=_MxhGSR9nOODGcEb5t98inURPC4z3QT_BcP8yZ5n4OA,22390
86
86
  dtlpy/entities/pipeline_execution.py,sha256=xbtG99GHVwZ8YNkx-X760h7OZPhNVJB1G0p1Zvv9QZs,10217
87
87
  dtlpy/entities/project.py,sha256=KIYPcuCJq1dZ-hJXV2aXDL4N2I1BBD8YajMXCd_dnr8,15047
88
- dtlpy/entities/prompt_item.py,sha256=CmyG6XjRSgq2DkYgFMqHPlblBMFEv3HBu06QDbc1bNk,21522
88
+ dtlpy/entities/prompt_item.py,sha256=RVJv2gmjCriz-70fJzlKFqC1LpQr1w0R5ojTY0TXOHk,21676
89
89
  dtlpy/entities/recipe.py,sha256=gOyWVxWlUees1hIpHCvvjhN-5FpDMtlFb1CxsgWiAVY,12218
90
90
  dtlpy/entities/reflect_dict.py,sha256=11pCUyRsTfwSRP2vErSvhq-CdUMMEwfkGUvcDxek94k,3375
91
91
  dtlpy/entities/resource_execution.py,sha256=93Knb8xZXTOvEWks4BZSXz7ROnheNdIsD2tQ-iF3m14,5171
92
- dtlpy/entities/service.py,sha256=Y_dw7jClMGduQXWSTbnHsqYldplrCq_kmEvvHL6RLG0,34188
92
+ dtlpy/entities/service.py,sha256=A3-hCgcSrUSpATgbdnPKyi9U0wKs8BSAlB4hkduR0cs,34442
93
93
  dtlpy/entities/service_driver.py,sha256=q8SE7Ef-8xMbeduMGprVS4dnJ6ii3bj8JfVU9SoX4Pc,4034
94
94
  dtlpy/entities/setting.py,sha256=sfjJFKow-d3UxAGtiLQ9htoznusF9CJE9ph2-dFBrU0,8855
95
95
  dtlpy/entities/task.py,sha256=zwV1tqt2qEXsMRwDCFI3Ybil9F9UI9DEWq69rUFQXig,20100
@@ -150,7 +150,7 @@ dtlpy/miscellaneous/list_print.py,sha256=oUSBNVaroekOIRaXjWeTVQxSifMhsmPPxzZbSPE
150
150
  dtlpy/miscellaneous/zipping.py,sha256=-UeVNFTzF7iLo87QpISBLL1sZHnl0p5zrNmzharobd4,5513
151
151
  dtlpy/ml/__init__.py,sha256=coDyt1yOZrmc3FIFPnQ_GGUVtcr2U1OqBAcP2u8p63U,819
152
152
  dtlpy/ml/base_feature_extractor_adapter.py,sha256=V6dE9O3JRp2f6Xw0GO09mwdJFdW88Fu6_MsPXQ9m58g,1205
153
- dtlpy/ml/base_model_adapter.py,sha256=23_kaSbu0knX46ButQAOEPn8pgj5FcBpFxoD8JdKLlI,58275
153
+ dtlpy/ml/base_model_adapter.py,sha256=rJYI9Phb2PBZnpWsbww5IPmcWtBp9RBg7TpZGwdzs-0,63649
154
154
  dtlpy/ml/metrics.py,sha256=CEavVQ9FoF_iHHaQCs2tPOdA2QvRvVSY11by4wIpeyA,20498
155
155
  dtlpy/ml/predictions_utils.py,sha256=eNUaReXLDCex1y1TAxuFfQV_sCGk4iT77UJFMHMw6zI,12758
156
156
  dtlpy/ml/summary_writer.py,sha256=JuFlqJsUqTBfLlRI1HtffiJA5tcCqDbarAKuoRPnZew,2841
@@ -164,20 +164,20 @@ dtlpy/repositories/assignments.py,sha256=5pqswUueSgHJjkjdQ4Y3q0r8fOWPrCKGjJHgVel
164
164
  dtlpy/repositories/bots.py,sha256=Xbph8kNHQOPV3aQCykltYGwApgu_leTm4-a6XGtE2RI,8317
165
165
  dtlpy/repositories/codebases.py,sha256=fCYcVD5zDjhROvIXwC08Wtx-kUJsTX79X6owG1-Nnzo,25227
166
166
  dtlpy/repositories/collections.py,sha256=lg4fPtdwscNC8_T3vS52LSbphRu4XzcR7N-ihavR1Hw,13279
167
- dtlpy/repositories/commands.py,sha256=MappD0VQUjSn8OBbWcWIUODMevH_AIt6mIn_59iDvU0,6006
167
+ dtlpy/repositories/commands.py,sha256=eM8wJlCKAvRXEu9l07cyREC1lilulI48e2mziDwRepw,5869
168
168
  dtlpy/repositories/compositions.py,sha256=bw1NcZigG4v_JZstQxOWQ3UcZq_a5rQiiNx84WvPhCw,2211
169
- dtlpy/repositories/computes.py,sha256=2xwvK5LvFZoMqilpjgsK_9lchPgcOCLo1nWE44Uxbk0,16956
170
- dtlpy/repositories/datasets.py,sha256=H-j3rLPiGg7bIiV9rGHV4UHlUMlqkaqOYlrADJ_Bo3w,60649
171
- dtlpy/repositories/downloader.py,sha256=XauB3_KifBcf9ApHgdso8CF-pV7KvZeR9iHHV7V4w00,46740
169
+ dtlpy/repositories/computes.py,sha256=suCtnIOvSUGUPl_QrRtZWMq4niV7uJlxASO6dw-AQUI,17375
170
+ dtlpy/repositories/datasets.py,sha256=ngZyBagiZZD4E5JXtYUmHTQ9-BAUIZHn3dOGq2REhWM,71350
171
+ dtlpy/repositories/downloader.py,sha256=xe5uIb07BCIoyNVbNG21E7lyig1L5gTN81jQk3NpW5s,47610
172
172
  dtlpy/repositories/dpks.py,sha256=CY4Ltf_Oh0Bs7vo-5zndDLSvVneXQsrM_OhPdGwngGY,18941
173
- dtlpy/repositories/drivers.py,sha256=dI1xU3eQjEMvdb0w4IlmCPLiERaf_yVqRGpEt6Fw48Q,19399
174
- dtlpy/repositories/executions.py,sha256=O4nkq0FFhNOA7uAdqXRC52dcwhZxYMThaQfqqZCQVLc,33415
173
+ dtlpy/repositories/drivers.py,sha256=3grC9ytllyGgZNZBMZLTy1BLqQeOHxwjbtgQNyrPTMw,20035
174
+ dtlpy/repositories/executions.py,sha256=hWN1TjIXq2PZjH6L5Vdm3Fx-58k1X58ZSQbVLA2gvZk,33176
175
175
  dtlpy/repositories/feature_sets.py,sha256=UiHMceYolJJUDUEAV0z_APzCmdxCMQ3iy6RBepNfSXc,9892
176
- dtlpy/repositories/features.py,sha256=JCnzf90AHqsGA5uSe4j-iOXY7YazGFB13lbaqhuRf0M,10161
176
+ dtlpy/repositories/features.py,sha256=k0aqPdnconRaYsv4Z_vrLGvTjHOgkH0YuIwDHDyXeWw,10926
177
177
  dtlpy/repositories/integrations.py,sha256=bJ39gYcyhnlkGMd9aMBvb0L5tjoQPYOU60wUk8w4vzI,19975
178
- dtlpy/repositories/items.py,sha256=Wj-wWSeME_VxO_xAgH_I-WGkqs1plJrtTpmnN8OJGNM,40782
178
+ dtlpy/repositories/items.py,sha256=mXKSLJ44r0W5MDvfA6mpezM4KtE2ihoFRNyqF03jszc,40940
179
179
  dtlpy/repositories/messages.py,sha256=-OyZMoLLsAzHOurMmFPMZE9-FFCLd6iCeLpBCSDYjA4,3160
180
- dtlpy/repositories/models.py,sha256=BnaHlP37EmFgE7Tc1_jwplZruA23Stegc5uzL3dKANw,46003
180
+ dtlpy/repositories/models.py,sha256=VraBFhUE-k82tIQRb5rLm0cVvvuQNNPOJ3DJP7-2pUs,47074
181
181
  dtlpy/repositories/nodes.py,sha256=wvn8sqqkmLNkGl_ob1lZPt9afzDebA-QVvs1hvJCelM,3141
182
182
  dtlpy/repositories/ontologies.py,sha256=WIPutseCvKjw0c4eklMiSYHyzh7YCxxpHv-Vyvapz2U,20088
183
183
  dtlpy/repositories/organizations.py,sha256=zXJCkQ2d8sdZGt9QoGf-rXyWr2owNFsad4wo7uOFVyw,23350
@@ -195,11 +195,11 @@ dtlpy/repositories/tasks.py,sha256=UTHute3TJhI5WzXULHfHkuqSZvn0ZEdHrXmc1U6S1pM,6
195
195
  dtlpy/repositories/times_series.py,sha256=xpbs3H3FTUolPy157xxOSlI7KHfqATtENVOGMhGw5KM,11698
196
196
  dtlpy/repositories/triggers.py,sha256=_eMVkmdOnS_5XeqbGGOSmXsesuHlC6mnslelljZjMJY,22526
197
197
  dtlpy/repositories/upload_element.py,sha256=nf8yz0NBHpla8Irn6fIbxrD00qXcNfNTmpvKOVmSRjs,10934
198
- dtlpy/repositories/uploader.py,sha256=DH0ephkWAT7w0QqYND2FsdHrV8PElwRy2u8IMBI8chQ,32715
198
+ dtlpy/repositories/uploader.py,sha256=5hI2QMRJbI5V1bR2Xh5bDQwZncCqFL8QsLGwN5ErEg8,33356
199
199
  dtlpy/repositories/webhooks.py,sha256=rdjsqt8geo1Dp-xMIAtk3SRe7uAWw9JZ-FjS45v2C30,9282
200
200
  dtlpy/services/__init__.py,sha256=BA5Wr0xWhBqSy8ayz1lu0aQnhH1JP6LA8e2UokJPP4M,926
201
201
  dtlpy/services/aihttp_retry.py,sha256=k27de_S2sKAWTGjOm6qfiipwLDh0iislksbzTE2voaw,5153
202
- dtlpy/services/api_client.py,sha256=KRiVB3nUrGCWtQcmJXW0g22yvbQp-V09cXQKn6OZqcA,73246
202
+ dtlpy/services/api_client.py,sha256=N5975tlKE-8qVYrHXVO3Va4ATjuvQAyKgrHMgV6X1Es,73469
203
203
  dtlpy/services/api_reference.py,sha256=jlP-7JvxILf3_fNni7aSXf8iFPPXRks3KDxmQht4qpE,1555
204
204
  dtlpy/services/async_utils.py,sha256=V46H_LIF2xp3kQ-JJ1DomKlV_4hamdqsuS3ZsWuzGlI,4093
205
205
  dtlpy/services/calls_counter.py,sha256=HXCUY7CfbQGjz3DKiYzV4smK5O04N9wbbg7kfwGUFgk,1080
@@ -208,7 +208,7 @@ dtlpy/services/cookie.py,sha256=oXuLcJ8wrGOpuqsifCbKR9NoMCKnHvkivgRz-sZ5sbw,3809
208
208
  dtlpy/services/create_logger.py,sha256=zHy1A-qaNYjjRLL2fMMoLjqj_IwMnSpgeeXxo5ztjpI,6506
209
209
  dtlpy/services/events.py,sha256=hy47MkkBfzeNxd-pKW8svR3qzF80oKzl1XFdmf6hl7A,3770
210
210
  dtlpy/services/logins.py,sha256=C801uQps4rvWzrJ_DwkWnTH-vQDFyTd3h9bgY2FRIHs,9001
211
- dtlpy/services/reporter.py,sha256=-TpkDYeXgn7dZvgZLlxs52-mmJ6tNyJKZlvuiKsOhO8,9396
211
+ dtlpy/services/reporter.py,sha256=VDUb4oW0DR09awTetPn82ODAQ9ATcoDvMGZlXWbqiuk,9447
212
212
  dtlpy/services/service_defaults.py,sha256=QPhpQq6AvFi2XmOfSx-2asymrRaZ8kRTDq014ofI5UM,3951
213
213
  dtlpy/utilities/__init__.py,sha256=nsADzasz6BmK6YIhblbqHYm6_WT467QvSCP_03iG7yM,918
214
214
  dtlpy/utilities/base_package_runner.py,sha256=utKJx3-ZIK1PRDw-pr-oXgIpJWwnZD24OTzdNvbCooc,8786
@@ -227,14 +227,14 @@ dtlpy/utilities/reports/report.py,sha256=zer2AqDmmuuu_A31TjHI9yS4RnjAlIeQZ5xWzOM
227
227
  dtlpy/utilities/videos/__init__.py,sha256=erjgtnnSJYk3k9j4PGzJwu3Ohv7H7DMnKySvJUQoaQs,751
228
228
  dtlpy/utilities/videos/video_player.py,sha256=KJiMcrGo17qwaSnOF-SsccAR8BVChC_HMgTV6zW1bJ8,24670
229
229
  dtlpy/utilities/videos/videos.py,sha256=PjPx_2hwAxctoz96XBd407ds8JZpzur4Z54rtJlVO-8,22345
230
- dtlpy-1.114.17.data/scripts/dlp,sha256=R99eKZP_pZ7egsYawtuw2lRL_6b0P7spq7JpGAY3H7Y,11
231
- dtlpy-1.114.17.data/scripts/dlp.bat,sha256=2V2sDEhvV2Tm7nMoyFdIxo5ODq-msgt9LJesLoPVmX0,38
232
- dtlpy-1.114.17.data/scripts/dlp.py,sha256=sN0lxOiBmYOVijl1vCm3wkyr2TGas75AGaHV7pxt6oo,4521
233
- dtlpy-1.114.17.dist-info/licenses/LICENSE,sha256=WtjCEwlcVzkh1ziO35P2qfVEkLjr87Flro7xlHz3CEY,11556
230
+ dtlpy-1.115.44.data/scripts/dlp,sha256=R99eKZP_pZ7egsYawtuw2lRL_6b0P7spq7JpGAY3H7Y,11
231
+ dtlpy-1.115.44.data/scripts/dlp.bat,sha256=2V2sDEhvV2Tm7nMoyFdIxo5ODq-msgt9LJesLoPVmX0,38
232
+ dtlpy-1.115.44.data/scripts/dlp.py,sha256=sN0lxOiBmYOVijl1vCm3wkyr2TGas75AGaHV7pxt6oo,4521
233
+ dtlpy-1.115.44.dist-info/licenses/LICENSE,sha256=WtjCEwlcVzkh1ziO35P2qfVEkLjr87Flro7xlHz3CEY,11556
234
234
  tests/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
235
235
  tests/features/environment.py,sha256=FLJTz1dYMfeWauwRHED_weESoOKc_ZfcgvGiWoS2lYA,19484
236
- dtlpy-1.114.17.dist-info/METADATA,sha256=swwsC6oynjG1l3tHvgvss4qsQ8agx6nCSyQz54izgOo,5823
237
- dtlpy-1.114.17.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
238
- dtlpy-1.114.17.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
239
- dtlpy-1.114.17.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
240
- dtlpy-1.114.17.dist-info/RECORD,,
236
+ dtlpy-1.115.44.dist-info/METADATA,sha256=9AGLtnzkuzZ50dBYDqiu8SAKGM5Rj_mvCU2SYCVy8q8,6022
237
+ dtlpy-1.115.44.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
238
+ dtlpy-1.115.44.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
239
+ dtlpy-1.115.44.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
240
+ dtlpy-1.115.44.dist-info/RECORD,,
File without changes