dtlpy 1.111.11__py3-none-any.whl → 1.113.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dtlpy/__version__.py CHANGED
@@ -1 +1 @@
1
- version = '1.111.11'
1
+ version = '1.113.10'
dtlpy/entities/compute.py CHANGED
@@ -12,6 +12,7 @@ class ClusterProvider(str, Enum):
12
12
  HPC = 'hpc'
13
13
  LOCAL = 'local'
14
14
  RANCHER_K3S = 'rancher-k3s'
15
+ RANCHER_RKE = 'rancher-rke'
15
16
 
16
17
 
17
18
  class ComputeType(str, Enum):
@@ -327,8 +328,8 @@ class Compute:
327
328
  self._computes = repositories.Computes(client_api=self._client_api)
328
329
  return self._computes
329
330
 
330
- def delete(self):
331
- return self.computes.delete(compute_id=self.id)
331
+ def delete(self, skip_destroy: bool = False):
332
+ return self.computes.delete(compute_id=self.id, skip_destroy=skip_destroy)
332
333
 
333
334
  def update(self):
334
335
  return self.computes.update(compute=self)
@@ -111,9 +111,12 @@ class Integration(entities.BaseEntity):
111
111
  raise ValueError('Must input a valid Project entity')
112
112
  self._project = project
113
113
 
114
- def update(self,
115
- new_name: str = None,
116
- new_options: dict = None):
114
+ def update(
115
+ self,
116
+ new_name: str = None,
117
+ new_options: dict = None,
118
+ reload_services: bool = None
119
+ ):
117
120
  """
118
121
  Update the integration's name.
119
122
 
@@ -121,6 +124,7 @@ class Integration(entities.BaseEntity):
121
124
 
122
125
  :param str new_name: new name
123
126
  :param dict new_options: new value
127
+ :param bool reload_services: reload services associated with this integration
124
128
  :return: Integration object
125
129
  :rtype: dtlpy.entities.integration.Integration
126
130
 
@@ -148,10 +152,13 @@ class Integration(entities.BaseEntity):
148
152
  error='400',
149
153
  message='Must provide an identifier in inputs')
150
154
 
151
- identifier.integrations.update(new_name=new_name,
152
- integrations_id=self.id,
153
- integration=self,
154
- new_options=new_options)
155
+ identifier.integrations.update(
156
+ new_name=new_name,
157
+ integrations_id=self.id,
158
+ integration=self,
159
+ new_options=new_options,
160
+ reload_services=reload_services
161
+ )
155
162
 
156
163
  def delete(self,
157
164
  sure: bool = False,
dtlpy/entities/model.py CHANGED
@@ -101,6 +101,7 @@ class Model(entities.BaseEntity):
101
101
  scope = attr.ib()
102
102
  version = attr.ib()
103
103
  context = attr.ib()
104
+ status_logs = attr.ib()
104
105
 
105
106
  # name change
106
107
  package_id = attr.ib(repr=False)
@@ -199,7 +200,8 @@ class Model(entities.BaseEntity):
199
200
  output_type=_json.get('outputType', None),
200
201
  module_name=_json.get('moduleName', None),
201
202
  updated_by=_json.get('updatedBy', None),
202
- app=_json.get('app', None)
203
+ app=_json.get('app', None),
204
+ status_logs=_json.get('statusLogs', []),
203
205
  )
204
206
  inst.is_fetched = is_fetched
205
207
  return inst
@@ -229,7 +231,8 @@ class Model(entities.BaseEntity):
229
231
  attr.fields(Model).input_type,
230
232
  attr.fields(Model).output_type,
231
233
  attr.fields(Model).updated_by,
232
- attr.fields(Model).app
234
+ attr.fields(Model).app,
235
+ attr.fields(Model).status_logs
233
236
  ))
234
237
  _json['packageId'] = self.package_id
235
238
  _json['datasetId'] = self.dataset_id
@@ -253,6 +256,8 @@ class Model(entities.BaseEntity):
253
256
  _json['updatedBy'] = self.updated_by
254
257
  if self.app:
255
258
  _json['app'] = self.app
259
+ if self.status_logs:
260
+ _json['statusLogs'] = self.status_logs
256
261
 
257
262
  return _json
258
263
 
@@ -295,7 +300,10 @@ class Model(entities.BaseEntity):
295
300
  def package(self):
296
301
  if self._package is None:
297
302
  try:
298
- self._package = self.packages.get(package_id=self.package_id)
303
+ if self.app:
304
+ self._package = self.dpks.get_revisions(dpk_id=self.app['dpkId'], version=self.app['dpkVersion'])
305
+ else:
306
+ self._package = self.packages.get(package_id=self.package_id)
299
307
  except Exception as e:
300
308
  error = e
301
309
  try:
@@ -6,6 +6,8 @@ import copy
6
6
  import sys
7
7
 
8
8
  import attr
9
+
10
+ from .filters import FiltersOperations, FiltersOrderByDirection, FiltersResource
9
11
  from .. import miscellaneous
10
12
  from ..services.api_client import ApiClient
11
13
 
@@ -29,6 +31,10 @@ class PagedEntities:
29
31
  total_pages_count = attr.ib(default=0)
30
32
  items_count = attr.ib(default=0)
31
33
 
34
+ # hybrid pagination
35
+ use_id_based_paging = attr.ib(default=False)
36
+ last_seen_id = attr.ib(default=None)
37
+
32
38
  # execution attribute
33
39
  _service_id = attr.ib(default=None, repr=False)
34
40
  _project_id = attr.ib(default=None, repr=False)
@@ -43,6 +49,15 @@ class PagedEntities:
43
49
  # items list
44
50
  items = attr.ib(default=miscellaneous.List(), repr=False)
45
51
 
52
+ @staticmethod
53
+ def _has_explicit_sort(flt):
54
+ """
55
+ Check if the filter has custom sort fields defined (not id/createdAt).
56
+ """
57
+ prepared = flt.prepare() if flt else {}
58
+ sort_fields = list(prepared.get("sort", {}).keys())
59
+ return bool(sort_fields and sort_fields[0] not in {"id", "createdAt"})
60
+
46
61
  def process_result(self, result):
47
62
  """
48
63
  :param result: json object
@@ -71,7 +86,8 @@ class PagedEntities:
71
86
  return self.items_count
72
87
 
73
88
  def __iter__(self):
74
- pbar = tqdm.tqdm(total=self.total_pages_count, disable=self._client_api.verbose.disable_progress_bar_iterate_pages,
89
+ pbar = tqdm.tqdm(total=self.total_pages_count,
90
+ disable=self._client_api.verbose.disable_progress_bar_iterate_pages,
75
91
  file=sys.stdout, desc="Iterate Pages")
76
92
  if self.page_offset != 0:
77
93
  # reset the count for page 0
@@ -109,18 +125,74 @@ class PagedEntities:
109
125
  if page_offset is None:
110
126
  page_offset = self.page_offset
111
127
 
112
- if self.filters is not None:
113
- filters = copy.copy(self.filters)
114
- filters.page = page_offset
115
- filters.page_size = page_size
116
- if self._list_function is None:
117
- result = self.items_repository._list(filters=filters)
128
+ if self.filters is None:
129
+ raise ValueError("Cant return page. Filters is empty")
130
+
131
+ req = copy.deepcopy(self.filters)
132
+ req.page_size = page_size
133
+
134
+ after_id = getattr(req, "after_id", None)
135
+ if after_id is not None:
136
+ delattr(req, "after_id")
137
+
138
+ enable_hybrid = getattr(self.filters, "resource", None) in [
139
+ FiltersResource.ITEM,
140
+ FiltersResource.ANNOTATION,
141
+ FiltersResource.FEATURE,
142
+ ]
143
+
144
+ prepared= req.prepare()
145
+ sort_spec= prepared.get("sort", {})
146
+ sort_dir= next(iter(sort_spec.values()), None)
147
+ order= sort_dir or FiltersOrderByDirection.ASCENDING
148
+ operator_value = (FiltersOperations.LESS_THAN if sort_dir == FiltersOrderByDirection.DESCENDING else FiltersOperations.GREATER_THAN)
149
+
150
+ if enable_hybrid and not self._has_explicit_sort(req):
151
+ req.sort_by(field="id", value=order)
152
+
153
+ if enable_hybrid and self.use_id_based_paging:
154
+ req.page = 0
155
+ if self.last_seen_id:
156
+ req.add(
157
+ field="id",
158
+ values=self.last_seen_id,
159
+ operator=operator_value,
160
+ method=FiltersOperations.AND,
161
+ )
162
+ else:
163
+ auto_hybrid = (
164
+ enable_hybrid
165
+ and not self.use_id_based_paging
166
+ and not self._has_explicit_sort(self.filters)
167
+ and self.last_seen_id is not None
168
+ )
169
+ if auto_hybrid and page_offset > 0:
170
+ req.page = 0
171
+ req.add(
172
+ field="id",
173
+ values=after_id or self.last_seen_id,
174
+ operator=operator_value,
175
+ method=FiltersOperations.AND,
176
+ )
177
+ self.use_id_based_paging = True
118
178
  else:
119
- result = self._list_function(filters=filters)
120
- items = self.process_result(result)
121
- return items
179
+ req.page = page_offset
180
+
181
+ if self._list_function is None:
182
+ result = self.items_repository._list(filters=req)
122
183
  else:
123
- raise ValueError('Cant return page. Filters is empty')
184
+ result = self._list_function(filters=req)
185
+
186
+ items = self.process_result(result)
187
+
188
+ if enable_hybrid and items and hasattr(items[-1], "id"):
189
+ self.last_seen_id = items[-1].id
190
+
191
+ if self.use_id_based_paging:
192
+ if "hasNextPage" not in result:
193
+ self.has_next_page = len(items) == page_size
194
+
195
+ return items
124
196
 
125
197
  def get_page(self, page_offset=None, page_size=None):
126
198
  """
@@ -164,7 +236,8 @@ class PagedEntities:
164
236
  def all(self):
165
237
  page_offset = 0
166
238
  page_size = 100
167
- pbar = tqdm.tqdm(total=self.items_count, disable=self._client_api.verbose.disable_progress_bar,
239
+ pbar = tqdm.tqdm(total=self.items_count,
240
+ disable=self._client_api.verbose.disable_progress_bar,
168
241
  file=sys.stdout, desc='Iterate Entity')
169
242
  total_pages = math.ceil(self.items_count / page_size)
170
243
  jobs = list()
@@ -192,4 +265,4 @@ class PagedEntities:
192
265
  self.items.print(columns=columns)
193
266
 
194
267
  def to_df(self, columns=None):
195
- return self.items.to_df(columns=columns)
268
+ return self.items.to_df(columns=columns)
@@ -313,10 +313,9 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
313
313
  self.logger.debug("Downloading subset {!r} of {}".format(subset,
314
314
  self.model_entity.dataset.name))
315
315
 
316
- annotation_filters = entities.Filters(resource=entities.FiltersResource.ANNOTATION)
317
-
318
-
316
+ annotation_filters = None
319
317
  if self.model_entity.output_type is not None and self.model_entity.output_type != "embedding":
318
+ annotation_filters = entities.Filters(resource=entities.FiltersResource.ANNOTATION, use_defaults=False)
320
319
  if self.model_entity.output_type in [entities.AnnotationType.SEGMENTATION,
321
320
  entities.AnnotationType.POLYGON]:
322
321
  model_output_types = [entities.AnnotationType.SEGMENTATION, entities.AnnotationType.POLYGON]
@@ -329,12 +328,12 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
329
328
  operator=entities.FiltersOperations.IN
330
329
  )
331
330
 
332
- if not self.configuration.get("include_model_annotations", False):
333
- annotation_filters.add(
334
- field="metadata.system.model.name",
335
- values=False,
336
- operator=entities.FiltersOperations.EXISTS
337
- )
331
+ if not self.configuration.get("include_model_annotations", False):
332
+ annotation_filters.add(
333
+ field="metadata.system.model.name",
334
+ values=False,
335
+ operator=entities.FiltersOperations.EXISTS
336
+ )
338
337
 
339
338
  ret_list = dataset.items.download(filters=filters,
340
339
  local_path=data_subset_base_path,
@@ -694,8 +693,6 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
694
693
  if output_path is not None:
695
694
  self.save_to_model(local_path=output_path, replace=True)
696
695
  logger.info('Execution failed. Setting model.status to failed')
697
- model.status = 'failed'
698
- model.update()
699
696
  raise
700
697
  return model
701
698
 
@@ -10,6 +10,7 @@ from typing import List, Optional, Dict
10
10
  from ..entities import ComputeCluster, ComputeContext, ComputeType
11
11
  from ..entities.integration import IntegrationType
12
12
  import logging
13
+ from urllib.parse import urlparse, urlencode, parse_qs, urlunparse
13
14
 
14
15
  logger = logging.getLogger(name='dtlpy')
15
16
 
@@ -133,25 +134,31 @@ class Computes:
133
134
  def __get_log_compute_progress_callback(self, compute_id: str):
134
135
  def func():
135
136
  compute = self.get(compute_id=compute_id)
136
- bootstrap_progress = compute.metadata.get('system', {}).get('bootstrapProcess', {}).get('progress', None)
137
- bootstrap_logs = compute.metadata.get('system', {}).get('bootstrapProcess', {}).get('logs', None)
137
+ bootstrap_progress = compute.metadata.get('system', {}).get('bootstrap', {}).get('progress', None)
138
+ bootstrap_logs = compute.metadata.get('system', {}).get('bootstrap', {}).get('logs', None)
138
139
  validation_progress = compute.metadata.get('system', {}).get('validation', {}).get('progress', None)
139
140
  validation_logs = compute.metadata.get('system', {}).get('validation', {}).get('logs', None)
140
- if bootstrap_progress not in [None, 100]:
141
- logger.info(f"Bootstrap in progress: {bootstrap_progress}%")
141
+ if bootstrap_progress is not None:
142
+ if 'bootstrap' not in self.log_cache.get(compute_id, {}):
143
+ logger.info(f"Bootstrap in progress:")
142
144
  last_index = len(self.log_cache.get(compute_id, {}).get('bootstrap', []))
143
145
  new_logs = bootstrap_logs[last_index:]
144
146
  if new_logs:
145
- logger.info("Bootstrap Logs: {}".format('\n'.join(new_logs)))
147
+ for log in new_logs:
148
+ logger.info(log)
149
+ logger.info(f'Bootstrap progress: {int(bootstrap_progress)}%')
146
150
  if compute_id not in self.log_cache:
147
151
  self.log_cache[compute_id] = {}
148
152
  self.log_cache[compute_id]['bootstrap'] = bootstrap_logs
149
- if validation_progress not in [None, 100]:
150
- logger.info(f"Validating created compute. Progress: {validation_progress}%")
153
+ if bootstrap_progress in [100, None] and validation_progress is not None:
154
+ if 'validation' not in self.log_cache.get(compute_id, {}):
155
+ logger.info(f"Validating created compute:")
151
156
  last_index = len(self.log_cache.get(compute_id, {}).get('validation', []))
152
157
  new_logs = validation_logs[last_index:]
153
158
  if new_logs:
154
- logger.info("Validation Logs: {}".format('\n'.join(new_logs)))
159
+ for log in new_logs:
160
+ logger.info(log)
161
+ logger.info(f'Validation progress: {int(validation_progress)}%')
155
162
  if compute_id not in self.log_cache:
156
163
  self.log_cache[compute_id] = {}
157
164
  self.log_cache[compute_id]['validation'] = validation_logs
@@ -203,17 +210,25 @@ class Computes:
203
210
 
204
211
  return compute
205
212
 
206
- def delete(self, compute_id: str):
213
+ def delete(self, compute_id: str, skip_destroy: bool = False
214
+ ):
207
215
  """
208
216
  Delete a compute
209
217
 
210
218
  :param compute_id: compute ID
219
+ :param skip_destroy: bool
211
220
  """
212
-
221
+ url_path = self._base_url + '/{}'.format(compute_id)
222
+ params_to_add = {"skipDestroy": "true" if skip_destroy else "false" }
223
+ parsed_url = urlparse(url_path)
224
+ query_dict = parse_qs(parsed_url.query)
225
+ query_dict.update(params_to_add)
226
+ new_query = urlencode(query_dict, doseq=True)
227
+ url_path = urlunparse(parsed_url._replace(query=new_query))
213
228
  # request
214
229
  success, response = self._client_api.gen_request(
215
230
  req_type='delete',
216
- path=self._base_url + '/{}'.format(compute_id)
231
+ path=url_path
217
232
  )
218
233
 
219
234
  if not success:
@@ -96,6 +96,9 @@ class Downloader:
96
96
  error='400',
97
97
  message='Unknown annotation download option: {}, please choose from: {}'.format(
98
98
  ann_option, list(entities.ViewAnnotationOptions)))
99
+ # normalize items argument: treat empty list as "no items specified"
100
+ if isinstance(items, list) and len(items) == 0:
101
+ items = None
99
102
  #####################
100
103
  # items to download #
101
104
  #####################
@@ -1,4 +1,5 @@
1
1
  import logging
2
+ import re
2
3
 
3
4
  from .. import entities, miscellaneous, exceptions, _api_reference
4
5
  from ..services.api_client import ApiClient
@@ -151,7 +152,8 @@ class Drivers:
151
152
  allow_external_delete: bool = True,
152
153
  region: str = None,
153
154
  storage_class: str = "",
154
- path: str = ""):
155
+ path: str = "",
156
+ endpoint: str = None):
155
157
  """
156
158
  Create a storage driver.
157
159
 
@@ -167,6 +169,7 @@ class Drivers:
167
169
  :param str region: relevant only for s3 - the bucket region
168
170
  :param str storage_class: relevant only for s3
169
171
  :param str path: Optional. By default path is the root folder. Path is case sensitive integration
172
+ :param str endpoint: Optional. Custom endpoint for S3 storage. Must be in the format 'http://<hostname>:<port>' or 'https://<hostname>:<port>'.
170
173
  :return: driver object
171
174
  :rtype: dtlpy.entities.driver.Driver
172
175
 
@@ -185,6 +188,11 @@ class Drivers:
185
188
  integration_type = driver_type
186
189
  if driver_type == entities.ExternalStorage.S3:
187
190
  bucket_payload = 'bucketName'
191
+ if endpoint:
192
+ if not re.match(r'^https?://[A-Za-z0-9.-]+:\d+$', endpoint):
193
+ raise ValueError(
194
+ f"Invalid endpoint URL '{endpoint}'. Must be 'http://<hostname>:<port>' or 'https://<hostname>:<port>'."
195
+ )
188
196
  elif driver_type == entities.ExternalStorage.GCS:
189
197
  bucket_payload = 'bucket'
190
198
  else:
@@ -208,6 +216,8 @@ class Drivers:
208
216
  "allowExternalDelete": allow_external_delete,
209
217
  "creator": self._client_api.info().get('user_email')
210
218
  }
219
+ if endpoint and driver_type == entities.ExternalStorage.S3:
220
+ payload['payload']['endpoint'] = endpoint
211
221
 
212
222
  success, response = self._client_api.gen_request(req_type='post',
213
223
  path='/drivers',
@@ -111,7 +111,7 @@ class Features:
111
111
  if self._project_id is None:
112
112
  self._project_id = self.project.id
113
113
  filters.context = {"projects": [self._project_id]}
114
-
114
+
115
115
  paged = entities.PagedEntities(items_repository=self,
116
116
  filters=filters,
117
117
  page_offset=filters.page,
@@ -120,8 +120,10 @@ class Integrations:
120
120
  aws-cross - {}
121
121
  gcp-cross - {}
122
122
  gcp-workload-identity-federation - {"secret": "", "content": "{}", "clientId": ""}
123
- private-registry (ECR) - {"name": "", "spec": {"accessKeyId": "", "secretAccessKey": "", "account": "", "region": ""}}
124
- private-registry (GAR) - {"name": "", "spec": {"password": ""}} (can use generate_gar_options to generate the options)
123
+ private-registry (ECR) - can use generate_ecr_options to generate the options
124
+ private-registry (GAR) - use generate_gar_options to generate the options
125
+ private-registry (ACR) - use generate_azure_container_registry_options to generate the options
126
+ private-registry (DockerHub) - use generate_docker_hub_options to generate the options
125
127
 
126
128
  **Prerequisites**: You must be an *owner* in the organization.
127
129
 
@@ -180,6 +182,7 @@ class Integrations:
180
182
  integration: entities.Integration = None,
181
183
  new_options: dict = None,
182
184
  organization_id: str = None,
185
+ reload_services: bool = None,
183
186
  ):
184
187
  """
185
188
  Update the integration's name.
@@ -191,6 +194,7 @@ class Integrations:
191
194
  :param Integration integration: integration object
192
195
  :param dict new_options: new value
193
196
  :param str organization_id: organization id
197
+ :param bool reload_services: reload services associated with this integration
194
198
  :return: Integration object
195
199
  :rtype: dtlpy.entities.integration.Integration
196
200
 
@@ -225,7 +229,16 @@ class Integrations:
225
229
  else:
226
230
  organization_id = self.org.id
227
231
 
228
- url_path = '/orgs/{}/integrations/'.format(organization_id)
232
+ if reload_services is None:
233
+ logger.warning(
234
+ "Param reload_services was not provided. If the integration you are updating is used\n"
235
+ "in FaaS services these services will keep using the old value until updated."
236
+ )
237
+
238
+ url_path = '/orgs/{org_id}/integrations{query_params}'.format(
239
+ org_id=organization_id,
240
+ query_params='?reloadServices=true' if reload_services else ''
241
+ )
229
242
  payload = dict(integrationId=integrations_id if integrations_id is not None else integration.id)
230
243
  if new_name is not None:
231
244
  payload['name'] = new_name
@@ -355,6 +368,21 @@ class Integrations:
355
368
  """
356
369
  return IntegrationUtils.generate_docker_hub_options(username=username, password=password, email=email)
357
370
 
371
+ @staticmethod
372
+ def generate_azure_container_registry_options(username: str, password: str, location: str) -> dict:
373
+ """
374
+ Generates an Azure Container Registry JSON configuration and returns it as a base64-encoded string.
375
+
376
+ Parameters:
377
+ username (str): The Azure username.
378
+ password (str): The Azure password.
379
+ location (str): server URL of Azure Container Registry
380
+
381
+ Returns:
382
+ dict: A dictionary containing the base64-encoded repository JSON configuration.
383
+ """
384
+ return IntegrationUtils.generate_docker_hub_options(username=username, password=password, location=location)
385
+
358
386
  @staticmethod
359
387
  def generate_ecr_options(access_key_id: str, secret_access_key: str, account: str, region: str) -> dict:
360
388
  """
@@ -426,7 +454,7 @@ class IntegrationUtils:
426
454
  )
427
455
 
428
456
  @staticmethod
429
- def generate_docker_hub_options(username: str, password: str, email: str = None) -> dict:
457
+ def generate_docker_hub_options(username: str, password: str, email: str = None, location='docker.io') -> dict:
430
458
 
431
459
  if not username:
432
460
  raise ValueError('Missing Username')
@@ -436,7 +464,7 @@ class IntegrationUtils:
436
464
  auth = IntegrationUtils.encode('{}:{}'.format(username, password))
437
465
 
438
466
  return IntegrationUtils.generate_json_key_options(
439
- location='docker.io',
467
+ location=location,
440
468
  username=username,
441
469
  password=password,
442
470
  auth=auth,
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dtlpy
3
- Version: 1.111.11
3
+ Version: 1.113.10
4
4
  Summary: SDK and CLI for Dataloop platform
5
5
  Home-page: https://github.com/dataloop-ai/dtlpy
6
6
  Author: Dataloop Team
@@ -1,5 +1,5 @@
1
1
  dtlpy/__init__.py,sha256=-5fpi-yAwFdluh8QZ-sWXwNDCD97Q5BCgIs7pUDl04o,20444
2
- dtlpy/__version__.py,sha256=KBp0kUjFeeHF5b3vE1pKBkiadaOJfs-ga44MCvxCu8U,21
2
+ dtlpy/__version__.py,sha256=AHVPkR4bml10NDAI863smQZmEgiJXZehmC3rrJ_8d9I,21
3
3
  dtlpy/exceptions.py,sha256=EQCKs3pwhwZhgMByQN3D3LpWpdxwcKPEEt-bIaDwURM,2871
4
4
  dtlpy/new_instance.py,sha256=tUCzBGaSpm9GTjRuwOkFgo3A8vopUQ-baltdJss3XlI,9964
5
5
  dtlpy/assets/__init__.py,sha256=D_hAa6NM8Zoy32sF_9b7m0b7I-BQEyBFg8-9Tg2WOeo,976
@@ -56,7 +56,7 @@ dtlpy/entities/bot.py,sha256=is3NUCnPg56HSjsHIvFcVkymValMqDV0uHRDC1Ib-ds,3819
56
56
  dtlpy/entities/codebase.py,sha256=pwRkAq2GV0wvmzshg89IAmE-0I2Wsy_-QNOu8OV8uqc,8999
57
57
  dtlpy/entities/collection.py,sha256=FPPPfIxOsBG1ujORPJVq8uXyF8vhIqC6N4EiI9SJzl0,1160
58
58
  dtlpy/entities/command.py,sha256=5RMQYjOGLRF8JZd7QFAPyE8utsp4eZzLApI2dEAbaqo,5301
59
- dtlpy/entities/compute.py,sha256=U974uaXGCfr-TSHqPmyLeJWbwbO-3K082hS1Scapv8M,14497
59
+ dtlpy/entities/compute.py,sha256=OdlCagVR-rh5IeHsUq1EtBq001_kFMwInCJdZNHh8G0,14584
60
60
  dtlpy/entities/dataset.py,sha256=GEvBOly1M8uU--apQZ-G-78DJZzFk178LmMhEANyi0A,53838
61
61
  dtlpy/entities/directory_tree.py,sha256=Rni6pLSWytR6yeUPgEdCCRfTg_cqLOdUc9uCqz9KT-Q,1186
62
62
  dtlpy/entities/dpk.py,sha256=XrK8X8p4Ag6LMjDrDpMstY-h_yTll_sMmKTZT6bLbWE,17923
@@ -66,12 +66,12 @@ dtlpy/entities/feature.py,sha256=9fFjD0W57anOVSAVU55ypxN_WTCsWTG03Wkc3cAAj78,373
66
66
  dtlpy/entities/feature_set.py,sha256=niw4MkmrDbD_LWQu1X30uE6U4DCzmFhPTaYeZ6VZDB0,4443
67
67
  dtlpy/entities/filters.py,sha256=Cdx3BzYa8kIfvW37Gmmwiu4eH4ytfWByu8TQOBvtR2o,22644
68
68
  dtlpy/entities/gis_item.py,sha256=Uk-wMBxwcHsImjz4qOjP-EyZAohbRzN43kMpCaVjCXU,3982
69
- dtlpy/entities/integration.py,sha256=Kdy1j6-cJLW8qNmnqCmdg36phi843YDrlMqcMyMfvYk,5875
69
+ dtlpy/entities/integration.py,sha256=XraOApW9jbT6EdZraRX2In6sMbfNgEGf2V5Um2RCRqA,6001
70
70
  dtlpy/entities/item.py,sha256=WCIPHUmubIe0wva-YMm-LPQdn2S3_-Q151x49C9NEw8,34591
71
71
  dtlpy/entities/label.py,sha256=ycDYavIgKhz806plIX-64c07_TeHpDa-V7LnfFVe4Rg,3869
72
72
  dtlpy/entities/links.py,sha256=FAmEwHtsrqKet3c0UHH9u_gHgG6_OwF1-rl4xK7guME,2516
73
73
  dtlpy/entities/message.py,sha256=ApJuaKEqxATpXjNYUjGdYPu3ibQzEMo8-LtJ_4xAcPI,5865
74
- dtlpy/entities/model.py,sha256=YwjIi3MxAZoyartTvqx_qhtDKQe6zVsQuwZbYLygMxU,26898
74
+ dtlpy/entities/model.py,sha256=j7mLY38Tl2jTLfdHFOqcctb655u7drWkGc8l2CWtbyE,27321
75
75
  dtlpy/entities/node.py,sha256=RiCqG659Pb1PZNMewR-F7eNbU5tt713fiZY9xW6-Pes,39199
76
76
  dtlpy/entities/ontology.py,sha256=924g9c2ZTfr69fWd_ejrVU0C-MAUR8UAhhz6GY-IQME,32100
77
77
  dtlpy/entities/organization.py,sha256=Zm-tTHV82PvYyTNetRRXqvmvzBCbXEwS-gAENf7Zny4,9874
@@ -80,7 +80,7 @@ dtlpy/entities/package_defaults.py,sha256=wTD7Z7rGYjVy8AcUxTFEnkOkviiJaLVZYvduiU
80
80
  dtlpy/entities/package_function.py,sha256=M42Kvw9A8b6msAkv-wRNAQg_-UC2bejniCjeKDugudc,6314
81
81
  dtlpy/entities/package_module.py,sha256=cOkIITATkzzCQpE0sdPiBUisAz8ImlPG2YGZ0K7SypA,5151
82
82
  dtlpy/entities/package_slot.py,sha256=XBwCodQe618sQm0bmx46Npo94mEk-zUV7ZX0mDRcsD8,3946
83
- dtlpy/entities/paged_entities.py,sha256=grNjt2FYg4gKBlVRDkztI1BPOI4JoGeyjvmOW3BnB3k,5927
83
+ dtlpy/entities/paged_entities.py,sha256=AZNHyN5dhaZvtiYCkZsKZfJkmYHonWDBvM560zuC-TE,8419
84
84
  dtlpy/entities/pipeline.py,sha256=JtWGoCUhVszOVkBNK43fbTt446fkND4wH-Y-fN_llww,20851
85
85
  dtlpy/entities/pipeline_execution.py,sha256=EQhW4W_G1bIPShYbJSAT--1WNQuvxVQbcQ_MCHIX0KI,9938
86
86
  dtlpy/entities/project.py,sha256=ZUx8zA3mr6N145M62R3UDPCCzO1vxfyWO6vjES-bO-g,14653
@@ -149,7 +149,7 @@ dtlpy/miscellaneous/list_print.py,sha256=fBGTMXFUwDG8DD4W6HyR8BTGtbTckLf4W09quNR
149
149
  dtlpy/miscellaneous/zipping.py,sha256=JplTc8UDFvO8WaD5vKuumVLN0lU_-GtHoE0doWKtmKg,5383
150
150
  dtlpy/ml/__init__.py,sha256=vPkyXpc9kcWWZ_PxyPEOsjKBJdEbowLkZr8FZIb_OBM,799
151
151
  dtlpy/ml/base_feature_extractor_adapter.py,sha256=iiEGYAx0Rdn4K46H_FlKrAv3ebTXHSxNVAmio0BxhaI,1178
152
- dtlpy/ml/base_model_adapter.py,sha256=7L6nVzTFBfcywouq48baxHNlrFu2hZX2q7IRBYox8J4,51148
152
+ dtlpy/ml/base_model_adapter.py,sha256=Sdrg68BLwHocvk74QUYatUgA6iv8Xhql8muX2PoF2z8,51153
153
153
  dtlpy/ml/metrics.py,sha256=BG2E-1Mvjv2e2No9mIJKVmvzqBvLqytKcw3hA7wVUNc,20037
154
154
  dtlpy/ml/predictions_utils.py,sha256=He_84U14oS2Ss7T_-Zj5GDiBZwS-GjMPURUh7u7DjF8,12484
155
155
  dtlpy/ml/summary_writer.py,sha256=dehDi8zmGC1sAGyy_3cpSWGXoGQSiQd7bL_Thoo8yIs,2784
@@ -165,15 +165,15 @@ dtlpy/repositories/codebases.py,sha256=pvcZxdrq0-zWysVbdXjUOhnfcF6hJD8v5VclNZ-zh
165
165
  dtlpy/repositories/collections.py,sha256=z-nkR33rq-MzkEff7DDSBlfsI_lkCDFwQZIlMaIT5rM,13514
166
166
  dtlpy/repositories/commands.py,sha256=MgXhXxbAzBa2QJM9Z5EsQZRaZ4fGBM17ALoldxi8xYA,5848
167
167
  dtlpy/repositories/compositions.py,sha256=H417BvlQAiWr5NH2eANFke6CfEO5o7DSvapYpf7v5Hk,2150
168
- dtlpy/repositories/computes.py,sha256=U_mBeMh-Rv1xaXGSd_3fyOX7n_0SmOTcjN-M_Ca1Go0,14360
168
+ dtlpy/repositories/computes.py,sha256=Q4maajELX1p2yyCSsflyZC2eVz7JBHgLsdcldnbG3JM,15125
169
169
  dtlpy/repositories/datasets.py,sha256=p0HBbTGrxAQ8h9tJsp1jRasPbwnMAtXQ4_sIef9_590,59358
170
- dtlpy/repositories/downloader.py,sha256=XJC9FhlXgHrA8Ae9bftrbs4YKFCcZoEYJAh6Bt6zGhU,45167
170
+ dtlpy/repositories/downloader.py,sha256=X5-vspCoTW7_QZuPdaZgOSTvM7jYU0Uf7o5PELZNY9g,45329
171
171
  dtlpy/repositories/dpks.py,sha256=dxZpGloZGH6MJG9ZFff5l3GlXw6i-52n9kxL-QiHosQ,18516
172
- dtlpy/repositories/drivers.py,sha256=fF0UuHCyBzop8pHfryex23mf0kVFAkqzNdOmwBbaWxY,10204
172
+ dtlpy/repositories/drivers.py,sha256=2fMzzt0ovNeYpfrAOqz4h14C5D7GCLLA5SDj9rQ4UfI,10817
173
173
  dtlpy/repositories/executions.py,sha256=4UoU6bnB3kl5cMuF1eJvDecfZCaB06gKWxPfv6_g1_k,32598
174
174
  dtlpy/repositories/feature_sets.py,sha256=UowMDAl_CRefRB5oZzubnsjU_OFgiPPdQXn8q2j4Kuw,9666
175
- dtlpy/repositories/features.py,sha256=A_RqTJxzjTh-Wbm0uXaoTNyHSfCLbeiH38iB11p2ifY,9915
176
- dtlpy/repositories/integrations.py,sha256=gSgaVp4MkcdrJMnXVr_fl4xrzhfJba8BFbBJTuJPwXc,18159
175
+ dtlpy/repositories/features.py,sha256=HZR-sLSdwiWdbFsnuZrTDSff0oRK2hwFBQ6UK2yVAvk,9923
176
+ dtlpy/repositories/integrations.py,sha256=Y5c37fQCaIkw1p5jPEbAqytgRVXuqe771eHC1hNDE7A,19491
177
177
  dtlpy/repositories/items.py,sha256=S1OWZ6s8AbVXMiLtCfBBiYPMG8OLqdUhKMHuZWE3bnU,40029
178
178
  dtlpy/repositories/messages.py,sha256=QU0Psckg6CA_Tlw9AVxqa-Ay1fRM4n269sSIJkH9o7E,3066
179
179
  dtlpy/repositories/models.py,sha256=uYVw319dMgVoXReb9VKl0b3v0_kgetROQaf56cvgwqs,38297
@@ -226,19 +226,19 @@ dtlpy/utilities/reports/report.py,sha256=3nEsNnIWmdPEsd21nN8vMMgaZVcPKn9iawKTTeO
226
226
  dtlpy/utilities/videos/__init__.py,sha256=SV3w51vfPuGBxaMeNemx6qEMHw_C4lLpWNGXMvdsKSY,734
227
227
  dtlpy/utilities/videos/video_player.py,sha256=LCxg0EZ_DeuwcT7U_r7MRC6Q19s0xdFb7x5Gk39PRms,24072
228
228
  dtlpy/utilities/videos/videos.py,sha256=Dj916B4TQRIhI7HZVevl3foFrCsPp0eeWwvGbgX3-_A,21875
229
- dtlpy-1.111.11.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
230
- dtlpy-1.111.11.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
231
- dtlpy-1.111.11.data/scripts/dlp.py,sha256=ZpfJvYE1_OTSorEYBphqTOutnHSb5TqOXh0y_mUCTJs,4393
229
+ dtlpy-1.113.10.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
230
+ dtlpy-1.113.10.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
231
+ dtlpy-1.113.10.data/scripts/dlp.py,sha256=ZpfJvYE1_OTSorEYBphqTOutnHSb5TqOXh0y_mUCTJs,4393
232
232
  tests/assets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
233
233
  tests/assets/models_flow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
234
234
  tests/assets/models_flow/failedmain.py,sha256=n8F4eu_u7JPrJ1zedbJPvv9e3lHb3ihoErqrBIcseEc,1847
235
235
  tests/assets/models_flow/main.py,sha256=vnDKyVZaae2RFpvwS22Hzi6Dt2LJerH4yQrmKtaT8_g,2123
236
236
  tests/assets/models_flow/main_model.py,sha256=Hl_tv7Q6KaRL3yLkpUoLMRqu5-ab1QsUYPL6RPEoamw,2042
237
237
  tests/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
238
- tests/features/environment.py,sha256=JcM956BxLBRvDqy6Kr1Nxd1FY_gxbE6XztZBVBMCGYM,18897
239
- dtlpy-1.111.11.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
240
- dtlpy-1.111.11.dist-info/METADATA,sha256=QzXWZvBKXwMVg-SiV3_zuuoZz_Dvu2qWgddvxF4EvZ8,5470
241
- dtlpy-1.111.11.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
242
- dtlpy-1.111.11.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
243
- dtlpy-1.111.11.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
244
- dtlpy-1.111.11.dist-info/RECORD,,
238
+ tests/features/environment.py,sha256=ZZNSN8TObnNMkX0IQhSolAs_9I_V9hHFL_IZjG0jrGU,18909
239
+ dtlpy-1.113.10.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
240
+ dtlpy-1.113.10.dist-info/METADATA,sha256=vyFPyK_u6Jdx3Fj5scfdLqseFuoQiqFAPk_WXSnYcl8,5470
241
+ dtlpy-1.113.10.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
242
+ dtlpy-1.113.10.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
243
+ dtlpy-1.113.10.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
244
+ dtlpy-1.113.10.dist-info/RECORD,,
@@ -294,10 +294,10 @@ def after_tag(context, tag):
294
294
  pass
295
295
  elif tag == 'wip':
296
296
  pass
297
- elif any(i_tag in tag for i_tag in ['DAT-', 'qa-', 'rc_only', 'skip_test', 'ATP', 'AIRGAPPED']):
297
+ elif any(i_tag in tag for i_tag in ['DAT-', 'qa-', 'rc_only', 'skip_test', 'ATP', 'AIRGAPPED', 'DM-cache']):
298
298
  pass
299
299
  else:
300
- raise ValueError('unknown tag: {}'.format(tag))
300
+ raise ValueError('Unknown tag: {}'.format(tag))
301
301
 
302
302
 
303
303
  @fixture
File without changes