dtlpy 1.110.3__py3-none-any.whl → 1.112.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
dtlpy/__init__.py CHANGED
@@ -293,6 +293,8 @@ ANNOTATION_TYPE_POSE = AnnotationType.POSE
  ANNOTATION_TYPE_SEGMENTATION = AnnotationType.SEGMENTATION
  ANNOTATION_TYPE_SUBTITLE = AnnotationType.SUBTITLE
  ANNOTATION_TYPE_TEXT = AnnotationType.TEXT
+ ANNOTATION_TYPE_FREE_TEXT = AnnotationType.FREE_TEXT
+ ANNOTATION_TYPE_REF_IMAGE = AnnotationType.REF_IMAGE

  ITEM_STATUS_COMPLETED = ItemStatus.COMPLETED
  ITEM_STATUS_APPROVED = ItemStatus.APPROVED
dtlpy/__version__.py CHANGED
@@ -1 +1 @@
- version = '1.110.3'
+ version = '1.112.9'
dtlpy/dlp/cli_utilities.py CHANGED
@@ -84,7 +84,7 @@ class FileHistory(History):
  def write(t):
  f.write(t.encode('utf-8'))

- write('\n# %s\n' % datetime.datetime.utcnow())
+ write('\n# %s\n' % datetime.datetime.now(datetime.timezone.utc))
  for line in string.split('\n'):
  hide = any(field in line for field in self.to_hide)
  if not hide:
dtlpy/dlp/dlp.py CHANGED
@@ -27,7 +27,7 @@ logger.propagate = False


  def dlp_exit():
- print(datetime.datetime.utcnow())
+ print(datetime.datetime.now(datetime.timezone.utc))
  print("Goodbye ;)")
  sys.exit(0)

@@ -71,7 +71,7 @@ def main():
  else:
  command_executor.run(args=args)
  except exceptions.TokenExpired:
- print(datetime.datetime.utcnow())
+ print(datetime.datetime.now(datetime.timezone.utc))
  print("[ERROR] token expired, please login.")
  continue
  except SystemExit as e:
@@ -83,11 +83,11 @@ def main():
  sys.exit(0)
  # error
  else:
- print(datetime.datetime.utcnow())
+ print(datetime.datetime.now(datetime.timezone.utc))
  print('"{command}" is not a valid command'.format(command=text))
  continue
  except Exception as e:
- print(datetime.datetime.utcnow())
+ print(datetime.datetime.now(datetime.timezone.utc))
  if hasattr(e, 'message'):
  print(e.message)
  else:
@@ -102,11 +102,11 @@ def main():
  command_executor.run(args=args)
  sys.exit(0)
  except exceptions.TokenExpired:
- print(datetime.datetime.utcnow())
+ print(datetime.datetime.now(datetime.timezone.utc))
  print("[ERROR] token expired, please login.")
  sys.exit(1)
  except Exception as e:
- print(datetime.datetime.utcnow())
+ print(datetime.datetime.now(datetime.timezone.utc))
  print(traceback.format_exc())
  print(e)
  sys.exit(1)
@@ -123,6 +123,6 @@ if __name__ == "__main__":
  try:
  main()
  except Exception as err:
- print(datetime.datetime.utcnow())
+ print(datetime.datetime.now(datetime.timezone.utc))
  print("[ERROR]\t%s" % err)
  print("Dataloop.ai CLI. Type dlp --help for options")
dtlpy/entities/annotation.py CHANGED
@@ -47,6 +47,8 @@ class AnnotationType(str, Enum):
  GIS = "gis"
  SEMANTIC_3D = "ref_semantic_3d"
  POLYLINE_3D = "polyline_3d"
+ FREE_TEXT = "text"
+ REF_IMAGE = "ref_image"


  class ViewAnnotationOptions(str, Enum):
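The two new members are the prompt-annotation types that were previously hard-coded outside the enum (see the dtlpy/entities/filters.py hunk below). Because `AnnotationType` subclasses `str`, the new members compare equal to the raw strings stored on annotations; a short sketch mirroring the definition:

    from enum import Enum

    class AnnotationType(str, Enum):   # trimmed mirror of the SDK enum
        FREE_TEXT = "text"
        REF_IMAGE = "ref_image"

    assert AnnotationType.FREE_TEXT == "text"
    assert AnnotationType.REF_IMAGE.value == "ref_image"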
dtlpy/entities/compute.py CHANGED
@@ -216,6 +216,7 @@ class ComputeCluster:
  node_pools: Optional[List[NodePool]] = None,
  metadata: Optional[Dict] = None,
  authentication: Optional[Authentication] = None,
+ plugins: Optional[dict] = None
  ):
  self.name = name
  self.endpoint = endpoint
@@ -225,6 +226,7 @@ class ComputeCluster:
  self.metadata = metadata if metadata is not None else {}
  self.authentication = authentication if authentication is not None else Authentication(
  AuthenticationIntegration("", ""))
+ self.plugins = plugins

  @classmethod
  def from_json(cls, _json):
@@ -235,7 +237,8 @@ class ComputeCluster:
  provider=ClusterProvider(_json.get('provider')),
  node_pools=[NodePool.from_json(np) for np in _json.get('nodePools', list())],
  metadata=_json.get('metadata'),
- authentication=Authentication.from_json(_json.get('authentication', dict()))
+ authentication=Authentication.from_json(_json.get('authentication', dict())),
+ plugins=_json.get('plugins')
  )

  def to_json(self):
@@ -246,20 +249,22 @@ class ComputeCluster:
  'provider': self.provider.value,
  'nodePools': [np.to_json() for np in self.node_pools],
  'metadata': self.metadata,
- 'authentication': self.authentication.to_json()
+ 'authentication': self.authentication.to_json(),
+ 'plugins': self.plugins
  }

  @classmethod
  def from_setup_json(cls, devops_output, integration):
  node_pools = [NodePool.from_json(n) for n in devops_output['config']['nodePools']]
  return cls(
- devops_output['config']['name'],
- devops_output['config']['endpoint'],
- devops_output['config']['kubernetesVersion'],
- ClusterProvider(devops_output['config']['provider']),
- node_pools,
- {},
- Authentication(AuthenticationIntegration(integration.id, integration.type))
+ name=devops_output['config']['name'],
+ endpoint=devops_output['config']['endpoint'],
+ kubernetes_version=devops_output['config']['kubernetesVersion'],
+ provider=ClusterProvider(devops_output['config']['provider']),
+ node_pools=node_pools,
+ metadata={},
+ authentication=Authentication(AuthenticationIntegration(integration.id, integration.type)),
+ plugins=devops_output['config'].get('plugins')
  )
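`ComputeCluster` now carries an optional `plugins` dict through `__init__`, `from_json`, and `to_json`, and `from_setup_json` switches from positional to keyword arguments so the widened signature cannot silently shift values. A sketch of why the keyword style matters (`make_cluster` is a stand-in, not SDK code):

    # Inserting a new parameter into a signature breaks positional callers
    # silently; keyword callers keep working:
    def make_cluster(name, endpoint, kubernetes_version, provider,
                     node_pools=None, metadata=None, authentication=None,
                     plugins=None):
        return dict(name=name, endpoint=endpoint,
                    kubernetes_version=kubernetes_version, provider=provider,
                    node_pools=node_pools or [], metadata=metadata or {},
                    authentication=authentication, plugins=plugins)

    cluster = make_cluster(
        name='my-cluster',                       # hypothetical values
        endpoint='https://cluster.example.com',
        kubernetes_version='1.29',
        provider='gcp',
        plugins={'ingress': {'enabled': True}},  # assumption: free-form dict
    )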
dtlpy/entities/filters.py CHANGED
@@ -332,7 +332,6 @@ class Filters:
  self._unique_fields = ['type']
  values = [annotation_type.value for annotation_type in entities.AnnotationType]
  values.remove(entities.AnnotationType.NOTE.value)
- values += ["text", "ref_image"] # Prompt Annotation Types
  self.add(field='type', values=values, operator=FiltersOperations.IN, method=FiltersMethod.AND)

  def __generate_query(self):
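The manual append of `"text"` and `"ref_image"` is dropped because `AnnotationType` now defines `FREE_TEXT` and `REF_IMAGE` (see the annotation.py hunk above), so the enum comprehension already yields both values and the append would have produced duplicates. A trimmed illustration:

    from enum import Enum

    class AnnotationType(str, Enum):   # trimmed mirror of the SDK enum
        NOTE = "note"
        BOX = "box"
        FREE_TEXT = "text"
        REF_IMAGE = "ref_image"

    values = [t.value for t in AnnotationType]
    values.remove(AnnotationType.NOTE.value)   # notes stay excluded
    assert values == ["box", "text", "ref_image"]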
dtlpy/entities/integration.py CHANGED
@@ -111,9 +111,12 @@ class Integration(entities.BaseEntity):
  raise ValueError('Must input a valid Project entity')
  self._project = project

- def update(self,
- new_name: str = None,
- new_options: dict = None):
+ def update(
+ self,
+ new_name: str = None,
+ new_options: dict = None,
+ reload_services: bool = None
+ ):
  """
  Update the integration's name.

@@ -121,6 +124,7 @@ class Integration(entities.BaseEntity):

  :param str new_name: new name
  :param dict new_options: new value
+ :param bool reload_services: reload services associated with this integration
  :return: Integration object
  :rtype: dtlpy.entities.integration.Integration

@@ -148,10 +152,13 @@ class Integration(entities.BaseEntity):
  error='400',
  message='Must provide an identifier in inputs')

- identifier.integrations.update(new_name=new_name,
- integrations_id=self.id,
- integration=self,
- new_options=new_options)
+ identifier.integrations.update(
+ new_name=new_name,
+ integrations_id=self.id,
+ integration=self,
+ new_options=new_options,
+ reload_services=reload_services
+ )

  def delete(self,
  sure: bool = False,
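`Integration.update()` now accepts `reload_services` and forwards it to the repository, which (see the dtlpy/repositories/integrations.py hunk below) appends `?reloadServices=true` to the request and warns when the flag is left unset. A usage sketch with hypothetical project and integration identifiers:

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')              # hypothetical
    integration = project.integrations.get(integrations_id='<id>')    # hypothetical

    # Pass reload_services explicitly; leaving it as None logs a warning that
    # running FaaS services keep using the old value until they are updated.
    integration.update(new_name='rotated-credentials', reload_services=True)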
dtlpy/entities/paged_entities.py CHANGED
@@ -6,6 +6,8 @@ import copy
  import sys

  import attr
+
+ from .filters import FiltersOperations, FiltersOrderByDirection, FiltersResource
  from .. import miscellaneous
  from ..services.api_client import ApiClient

@@ -29,6 +31,10 @@ class PagedEntities:
  total_pages_count = attr.ib(default=0)
  items_count = attr.ib(default=0)

+ # hybrid pagination
+ use_id_based_paging = attr.ib(default=False)
+ last_seen_id = attr.ib(default=None)
+
  # execution attribute
  _service_id = attr.ib(default=None, repr=False)
  _project_id = attr.ib(default=None, repr=False)
@@ -43,6 +49,15 @@ class PagedEntities:
  # items list
  items = attr.ib(default=miscellaneous.List(), repr=False)

+ @staticmethod
+ def _has_explicit_sort(flt):
+ """
+ Check if the filter has custom sort fields defined (not id/createdAt).
+ """
+ prepared = flt.prepare() if flt else {}
+ sort_fields = list(prepared.get("sort", {}).keys())
+ return bool(sort_fields and sort_fields[0] not in {"id", "createdAt"})
+
  def process_result(self, result):
  """
  :param result: json object
@@ -71,7 +86,8 @@ class PagedEntities:
  return self.items_count

  def __iter__(self):
- pbar = tqdm.tqdm(total=self.total_pages_count, disable=self._client_api.verbose.disable_progress_bar_iterate_pages,
+ pbar = tqdm.tqdm(total=self.total_pages_count,
+ disable=self._client_api.verbose.disable_progress_bar_iterate_pages,
  file=sys.stdout, desc="Iterate Pages")
  if self.page_offset != 0:
  # reset the count for page 0
@@ -109,18 +125,68 @@ class PagedEntities:
  if page_offset is None:
  page_offset = self.page_offset

- if self.filters is not None:
- filters = copy.copy(self.filters)
- filters.page = page_offset
- filters.page_size = page_size
- if self._list_function is None:
- result = self.items_repository._list(filters=filters)
+ if self.filters is None:
+ raise ValueError("Cant return page. Filters is empty")
+
+ req = copy.deepcopy(self.filters)
+ req.page_size = page_size
+
+ after_id = getattr(req, "after_id", None)
+ if after_id is not None:
+ delattr(req, "after_id")
+
+ enable_hybrid = getattr(self.filters, "resource", None) in [
+ FiltersResource.ITEM,
+ FiltersResource.ANNOTATION,
+ FiltersResource.FEATURE,
+ ]
+
+ if enable_hybrid and not self._has_explicit_sort(req):
+ req.sort_by(field="id", value=FiltersOrderByDirection.ASCENDING)
+
+ if enable_hybrid and self.use_id_based_paging:
+ req.page = 0
+ if self.last_seen_id:
+ req.add(
+ field="id",
+ values=self.last_seen_id,
+ operator=FiltersOperations.GREATER_THAN,
+ method=FiltersOperations.AND,
+ )
+ else:
+ auto_hybrid = (
+ enable_hybrid
+ and not self.use_id_based_paging
+ and not self._has_explicit_sort(self.filters)
+ and self.last_seen_id is not None
+ )
+ if auto_hybrid and page_offset > 0:
+ req.page = 0
+ req.add(
+ field="id",
+ values=after_id or self.last_seen_id,
+ operator=FiltersOperations.GREATER_THAN,
+ method=FiltersOperations.AND,
+ )
+ self.use_id_based_paging = True
  else:
- result = self._list_function(filters=filters)
- items = self.process_result(result)
- return items
+ req.page = page_offset
+
+ if self._list_function is None:
+ result = self.items_repository._list(filters=req)
  else:
- raise ValueError('Cant return page. Filters is empty')
+ result = self._list_function(filters=req)
+
+ items = self.process_result(result)
+
+ if enable_hybrid and items and hasattr(items[-1], "id"):
+ self.last_seen_id = items[-1].id
+
+ if self.use_id_based_paging:
+ if "hasNextPage" not in result:
+ self.has_next_page = len(items) == page_size
+
+ return items

  def get_page(self, page_offset=None, page_size=None):
  """
@@ -164,7 +230,8 @@ class PagedEntities:
  def all(self):
  page_offset = 0
  page_size = 100
- pbar = tqdm.tqdm(total=self.items_count, disable=self._client_api.verbose.disable_progress_bar,
+ pbar = tqdm.tqdm(total=self.items_count,
+ disable=self._client_api.verbose.disable_progress_bar,
  file=sys.stdout, desc='Iterate Entity')
  total_pages = math.ceil(self.items_count / page_size)
  jobs = list()
@@ -192,4 +259,4 @@ class PagedEntities:
  self.items.print(columns=columns)

  def to_df(self, columns=None):
- return self.items.to_df(columns=columns)
+ return self.items.to_df(columns=columns)
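The rewritten `return_page()` introduces hybrid pagination for ITEM, ANNOTATION, and FEATURE queries: when no custom sort is set, results are ordered by `id` ascending, and once a page has been fetched the pager switches from offset paging to keyset paging (`page=0` plus an `id > last_seen_id` condition). Keyset paging stays stable when entities are created or deleted mid-iteration and avoids deep-offset scans; when the backend response lacks `hasNextPage`, a full page is taken to mean more results exist. A self-contained sketch of the keyset idea over an in-memory stand-in for the query API:

    # 250 records standing in for platform entities with sortable string ids
    records = [{'id': f'{i:04d}'} for i in range(250)]

    def query(id_gt, page, page_size):
        # stand-in for items_repository._list: honors an 'id >' cursor
        rows = [r for r in sorted(records, key=lambda r: r['id'])
                if id_gt is None or r['id'] > id_gt]
        return rows[page * page_size:(page + 1) * page_size]

    last_seen_id, page_offset, out = None, 0, []
    while True:
        # after the first page, request page 0 and rely on the id cursor
        page = page_offset if last_seen_id is None else 0
        items = query(last_seen_id, page, page_size=100)
        if not items:
            break
        out.extend(items)
        last_seen_id = items[-1]['id']   # cursor for the next round
        page_offset += 1

    assert len(out) == 250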
dtlpy/miscellaneous/list_print.py CHANGED
@@ -76,7 +76,7 @@ class List(list, typing.MutableSequence[T]):
  str_timestamp = str(element['createdAt'])
  if len(str_timestamp) > 10:
  str_timestamp = str_timestamp[:10]
- element['createdAt'] = datetime.datetime.utcfromtimestamp(int(str_timestamp)).isoformat()
+ element['createdAt'] = datetime.datetime.fromtimestamp(int(str_timestamp), datetime.timezone.utc).isoformat()
  except Exception:
  pass
  df = pandas.DataFrame(to_print, columns=keys_list)
dtlpy/miscellaneous/zipping.py CHANGED
@@ -117,15 +117,14 @@ class Zipping:

  @staticmethod
  def unzip_directory(zip_filename, to_directory=None):
- zipdata = zipfile.ZipFile(zip_filename)
- zipinfos = zipdata.infolist()
-
- # iterate through each file
- for zipinfo in zipinfos:
- # encode the file names
- # zip package make decode by cp437 for file that have name that not ascii
- # this happen when the flag_bits be different than 0
- # so we encode the name back
- if not zipinfo.flag_bits:
- zipinfo.filename = zipinfo.filename.encode('cp437').decode('utf-8')
- zipdata.extract(zipinfo, to_directory)
+ with zipfile.ZipFile(zip_filename) as zipdata:
+ zipinfos = zipdata.infolist()
+ # iterate through each file
+ for zipinfo in zipinfos:
+ # encode the file names
+ # zip package make decode by cp437 for file that have name that not ascii
+ # this happen when the flag_bits be different than 0
+ # so we encode the name back
+ if not zipinfo.flag_bits:
+ zipinfo.filename = zipinfo.filename.encode('cp437').decode('utf-8')
+ zipdata.extract(zipinfo, to_directory)
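Wrapping the archive in a `with` block guarantees the file handle is closed even if `extract()` raises mid-loop; the previous version leaked the handle on error, which on Windows can keep the zip file locked. The change is behaviorally equivalent to a try/finally:

    import zipfile

    zipdata = zipfile.ZipFile('archive.zip')   # hypothetical archive name
    try:
        for zipinfo in zipdata.infolist():
            zipdata.extract(zipinfo, 'out_dir')
    finally:
        zipdata.close()                        # what the with-statement does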
dtlpy/ml/base_model_adapter.py CHANGED
@@ -313,27 +313,27 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
  self.logger.debug("Downloading subset {!r} of {}".format(subset,
  self.model_entity.dataset.name))

- if self.model_entity.output_type is not None:
+ annotation_filters = None
+ if self.model_entity.output_type is not None and self.model_entity.output_type != "embedding":
+ annotation_filters = entities.Filters(resource=entities.FiltersResource.ANNOTATION, use_defaults=False)
  if self.model_entity.output_type in [entities.AnnotationType.SEGMENTATION,
- entities.AnnotationType.POLYGON]:
+ entities.AnnotationType.POLYGON]:
  model_output_types = [entities.AnnotationType.SEGMENTATION, entities.AnnotationType.POLYGON]
  else:
  model_output_types = [self.model_entity.output_type]
- annotation_filters = entities.Filters(
+
+ annotation_filters.add(
  field=entities.FiltersKnownFields.TYPE,
  values=model_output_types,
- resource=entities.FiltersResource.ANNOTATION,
  operator=entities.FiltersOperations.IN
  )
- else:
- annotation_filters = entities.Filters(resource=entities.FiltersResource.ANNOTATION)

- if not self.configuration.get("include_model_annotations", False):
- annotation_filters.add(
- field="metadata.system.model.name",
- values=False,
- operator=entities.FiltersOperations.EXISTS
- )
+ if not self.configuration.get("include_model_annotations", False):
+ annotation_filters.add(
+ field="metadata.system.model.name",
+ values=False,
+ operator=entities.FiltersOperations.EXISTS
+ )

  ret_list = dataset.items.download(filters=filters,
  local_path=data_subset_base_path,
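This refactor changes when annotation filters are built: models with `output_type` of None or `"embedding"` now download the subset with no annotation filter at all (previously a default annotation filter was always created and the model-annotation exclusion always applied), while other typed models get a `use_defaults=False` filter seeded with the type condition. A decision-table sketch of the new flow (not SDK code):

    def build_annotation_filters(output_type, include_model_annotations):
        if output_type is None or output_type == "embedding":
            return None                  # no annotation filtering at all
        conditions = [("type", "in", [output_type])]
        if not include_model_annotations:
            # keep human annotations only: model name field must not exist
            conditions.append(("metadata.system.model.name", "exists", False))
        return conditions

    assert build_annotation_filters("embedding", False) is None
    assert len(build_annotation_filters("box", False)) == 2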
@@ -467,7 +467,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
  @entities.Package.decorators.function(display_name='Embed Items',
  inputs={'items': 'Item[]'},
  outputs={'items': 'Item[]', 'features': 'Json[]'})
- def embed_items(self, items: list, upload_features=None, batch_size=None, **kwargs):
+ def embed_items(self, items: list, upload_features=None, batch_size=None, progress:utilities.Progress=None, **kwargs):
  """
  Extract feature from an input list of items (or single) and return the items and the feature vector.

@@ -523,8 +523,9 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
  "Uploading items' feature vectors for model {!r}.".format(self.model_entity.name))
  try:
  list(pool.map(partial(self._upload_model_features,
- feature_set.id,
- self.model_entity.project_id),
+ progress.logger if progress is not None else self.logger,
+ feature_set.id,
+ self.model_entity.project_id),
  batch_items,
  batch_vectors))
  except Exception as err:
@@ -541,6 +542,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
  filters: entities.Filters = None,
  upload_features=None,
  batch_size=None,
+ progress:utilities.Progress=None,
  **kwargs):
  """
  Extract feature from all items given
@@ -569,6 +571,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
  self.embed_items(items=items,
  upload_features=upload_features,
  batch_size=batch_size,
+ progress=progress,
  **kwargs)
  return True

@@ -764,7 +767,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
  # =============

  @staticmethod
- def _upload_model_features(feature_set_id, project_id, item: entities.Item, vector):
+ def _upload_model_features(logger, feature_set_id, project_id, item: entities.Item, vector):
  try:
  if vector is not None:
  item.features.create(value=vector,
dtlpy/ml/train_utils.py CHANGED
@@ -28,8 +28,8 @@ def prepare_dataset(dataset: entities.Dataset,
  """

  project = dataset.project
- now = datetime.datetime.utcnow().isoformat(timespec='minutes', sep='T') # This serves as an id
- today = datetime.datetime.utcnow().strftime('%F')
+ now = datetime.datetime.now(datetime.timezone.utc).isoformat(timespec='minutes', sep='T') # This serves as an id
+ today = datetime.datetime.now(datetime.timezone.utc).strftime('%F')

  # CLONE
  clone_name = 'cloned-{ds_name}-{date_str}'.format(ds_name=dataset.name, date_str=today)
dtlpy/repositories/computes.py CHANGED
@@ -53,7 +53,8 @@ class Computes:
  features: Optional[Dict] = None,
  wait=True,
  status: entities.ComputeStatus = None,
- settings: entities.ComputeSettings = None
+ settings: entities.ComputeSettings = None,
+ metadata: dict = None
  ):
  """
  Create a new compute
@@ -68,10 +69,12 @@ class Computes:
  :param wait: Wait for compute creation
  :param status: Compute status
  :param settings: Compute settings
+ :param metadata: Compute metadata
  :return: Compute
  :rtype: dl.entities.compute.Compute
  """
-
+ if metadata is None:
+ metadata = {}
  shared_contexts_json = []
  for shared_context in shared_contexts:
  src_json = shared_context.to_json() if isinstance(shared_context, entities.ComputeContext) else shared_context
@@ -85,7 +88,8 @@ class Computes:
  'sharedContexts': shared_contexts_json,
  'cluster': cluster.to_json(),
  'status': status,
- "settings": settings.to_json() if isinstance(settings, entities.ComputeSettings) else settings
+ "settings": settings.to_json() if isinstance(settings, entities.ComputeSettings) else settings,
+ "metadata": metadata
  }

  # request
@@ -129,25 +133,31 @@ class Computes:
  def __get_log_compute_progress_callback(self, compute_id: str):
  def func():
  compute = self.get(compute_id=compute_id)
- bootstrap_progress = compute.metadata.get('system', {}).get('bootstrapProcess', {}).get('progress', None)
- bootstrap_logs = compute.metadata.get('system', {}).get('bootstrapProcess', {}).get('logs', None)
+ bootstrap_progress = compute.metadata.get('system', {}).get('bootstrap', {}).get('progress', None)
+ bootstrap_logs = compute.metadata.get('system', {}).get('bootstrap', {}).get('logs', None)
  validation_progress = compute.metadata.get('system', {}).get('validation', {}).get('progress', None)
  validation_logs = compute.metadata.get('system', {}).get('validation', {}).get('logs', None)
- if bootstrap_progress not in [None, 100]:
- logger.info(f"Bootstrap in progress: {bootstrap_progress}%")
+ if bootstrap_progress is not None:
+ if 'bootstrap' not in self.log_cache.get(compute_id, {}):
+ logger.info(f"Bootstrap in progress:")
  last_index = len(self.log_cache.get(compute_id, {}).get('bootstrap', []))
  new_logs = bootstrap_logs[last_index:]
  if new_logs:
- logger.info("Bootstrap Logs: {}".format('\n'.join(new_logs)))
+ for log in new_logs:
+ logger.info(log)
+ logger.info(f'Bootstrap progress: {int(bootstrap_progress)}%')
  if compute_id not in self.log_cache:
  self.log_cache[compute_id] = {}
  self.log_cache[compute_id]['bootstrap'] = bootstrap_logs
- if validation_progress not in [None, 100]:
- logger.info(f"Validating created compute. Progress: {validation_progress}%")
+ if bootstrap_progress in [100, None] and validation_progress is not None:
+ if 'validation' not in self.log_cache.get(compute_id, {}):
+ logger.info(f"Validating created compute:")
  last_index = len(self.log_cache.get(compute_id, {}).get('validation', []))
  new_logs = validation_logs[last_index:]
  if new_logs:
- logger.info("Validation Logs: {}".format('\n'.join(new_logs)))
+ for log in new_logs:
+ logger.info(log)
+ logger.info(f'Validation progress: {int(validation_progress)}%')
  if compute_id not in self.log_cache:
  self.log_cache[compute_id] = {}
  self.log_cache[compute_id]['validation'] = validation_logs
@@ -312,7 +322,8 @@ class Computes:
  cluster,
  ComputeType.KUBERNETES,
  status=config['config'].get('status', None),
- settings=config['config'].get('settings', None))
+ settings=config['config'].get('settings', None),
+ metadata=config['config'].get('metadata', None))

  return compute
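On the repository side, `create()` now accepts a `metadata` dict (defaulting to `{}`), the config-driven creation path forwards `config['config'].get('metadata', None)`, and the progress callback reads bootstrap state from `system.bootstrap` instead of `system.bootstrapProcess`, streaming new log lines one by one followed by an explicit percentage. A sketch of the metadata layout the callback now expects:

    compute_metadata = {
        'system': {
            'bootstrap': {'progress': 40, 'logs': ['installing agent...']},   # renamed key
            'validation': {'progress': 0, 'logs': []},
        }
    }
    bootstrap = compute_metadata.get('system', {}).get('bootstrap', {})
    print(f"Bootstrap progress: {int(bootstrap.get('progress', 0))}%")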
dtlpy/repositories/downloader.py CHANGED
@@ -96,6 +96,9 @@ class Downloader:
  error='400',
  message='Unknown annotation download option: {}, please choose from: {}'.format(
  ann_option, list(entities.ViewAnnotationOptions)))
+ # normalize items argument: treat empty list as “no items specified”
+ if isinstance(items, list) and len(items) == 0:
+ items = None
  #####################
  # items to download #
  #####################
dtlpy/repositories/drivers.py CHANGED
@@ -1,4 +1,5 @@
  import logging
+ import re

  from .. import entities, miscellaneous, exceptions, _api_reference
  from ..services.api_client import ApiClient
@@ -151,7 +152,8 @@ class Drivers:
  allow_external_delete: bool = True,
  region: str = None,
  storage_class: str = "",
- path: str = ""):
+ path: str = "",
+ endpoint: str = None):
  """
  Create a storage driver.

@@ -167,6 +169,7 @@ class Drivers:
  :param str region: relevant only for s3 - the bucket region
  :param str storage_class: relevant only for s3
  :param str path: Optional. By default path is the root folder. Path is case sensitive integration
+ :param endpoint path: Optional. Custom endpoint for S3 storage. Must be in the format 'http://<hostname>:<port>' or 'https://<hostname>:<port>'.
  :return: driver object
  :rtype: dtlpy.entities.driver.Driver

@@ -185,6 +188,11 @@ class Drivers:
  integration_type = driver_type
  if driver_type == entities.ExternalStorage.S3:
  bucket_payload = 'bucketName'
+ if endpoint:
+ if not re.match(r'^https?://[A-Za-z0-9.-]+:\d+$', endpoint):
+ raise ValueError(
+ f"Invalid endpoint URL '{endpoint}'. Must be 'http://<hostname>:<port>' or 'https://<hostname>:<port>'."
+ )
  elif driver_type == entities.ExternalStorage.GCS:
  bucket_payload = 'bucket'
  else:
@@ -208,6 +216,8 @@ class Drivers:
  "allowExternalDelete": allow_external_delete,
  "creator": self._client_api.info().get('user_email')
  }
+ if endpoint and driver_type == entities.ExternalStorage.S3:
+ payload['payload']['endpoint'] = endpoint

  success, response = self._client_api.gen_request(req_type='post',
  path='/drivers',
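S3 drivers can now point at a custom endpoint (useful for S3-compatible stores such as MinIO; the value is only sent when the driver type is S3). The endpoint must match the pattern enforced above: an http/https scheme, a hostname, and an explicit port. (The released docstring tag `:param endpoint path:` appears to be a typo for `:param str endpoint:`.) The regex behaves as follows:

    import re

    ENDPOINT_RE = r'^https?://[A-Za-z0-9.-]+:\d+$'   # pattern from the diff

    for endpoint in ('http://minio.local:9000',      # accepted
                     'https://s3.example.com:443',   # accepted
                     'https://s3.example.com',       # rejected: port required
                     'ftp://host:21'):               # rejected: wrong scheme
        print(endpoint, bool(re.match(ENDPOINT_RE, endpoint)))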
dtlpy/repositories/features.py CHANGED
@@ -111,7 +111,7 @@ class Features:
  if self._project_id is None:
  self._project_id = self.project.id
  filters.context = {"projects": [self._project_id]}
-
+
  paged = entities.PagedEntities(items_repository=self,
  filters=filters,
  page_offset=filters.page,
dtlpy/repositories/integrations.py CHANGED
@@ -120,8 +120,10 @@ class Integrations:
  aws-cross - {}
  gcp-cross - {}
  gcp-workload-identity-federation - {"secret": "", "content": "{}", "clientId": ""}
- private-registry (ECR) - {"name": "", "spec": {"accessKeyId": "", "secretAccessKey": "", "account": "", "region": ""}}
- private-registry (GAR) - {"name": "", "spec": {"password": ""}} (can use generate_gar_options to generate the options)
+ private-registry (ECR) - can use generate_ecr_options to generate the options
+ private-registry (GAR) - use generate_gar_options to generate the options
+ private-registry (ACR) - use generate_azure_container_registry_options to generate the options
+ private-registry (DockerHub) - use generate_docker_hub_options to generate the options

  **Prerequisites**: You must be an *owner* in the organization.

@@ -180,6 +182,7 @@ class Integrations:
  integration: entities.Integration = None,
  new_options: dict = None,
  organization_id: str = None,
+ reload_services: bool = None,
  ):
  """
  Update the integration's name.
@@ -191,6 +194,7 @@ class Integrations:
  :param Integration integration: integration object
  :param dict new_options: new value
  :param str organization_id: organization id
+ :param bool reload_services: reload services associated with this integration
  :return: Integration object
  :rtype: dtlpy.entities.integration.Integration

@@ -225,7 +229,16 @@ class Integrations:
  else:
  organization_id = self.org.id

- url_path = '/orgs/{}/integrations/'.format(organization_id)
+ if reload_services is None:
+ logger.warning(
+ "Param reload_services was not provided. If the integration you are updating is used\n"
+ "in FaaS services these services will keep using the old value until updated."
+ )
+
+ url_path = '/orgs/{org_id}/integrations{query_params}'.format(
+ org_id=organization_id,
+ query_params='?reloadServices=true' if reload_services else ''
+ )
  payload = dict(integrationId=integrations_id if integrations_id is not None else integration.id)
  if new_name is not None:
  payload['name'] = new_name
@@ -355,6 +368,21 @@ class Integrations:
  """
  return IntegrationUtils.generate_docker_hub_options(username=username, password=password, email=email)

+ @staticmethod
+ def generate_azure_container_registry_options(username: str, password: str, location: str) -> dict:
+ """
+ Generates an Azure Container Registry JSON configuration and returns it as a base64-encoded string.
+
+ Parameters:
+ username (str): The Azure username.
+ password (str): The Azure password.
+ location (str): server URL of Azure Container Registry
+
+ Returns:
+ str: A base64-encoded string representation of the repository JSON configuration.
+ """
+ return IntegrationUtils.generate_docker_hub_options(username=username, password=password, location=location)
+
  @staticmethod
  def generate_ecr_options(access_key_id: str, secret_access_key: str, account: str, region: str) -> dict:
  """
@@ -426,7 +454,7 @@ class IntegrationUtils:
  )

  @staticmethod
- def generate_docker_hub_options(username: str, password: str, email: str = None) -> dict:
+ def generate_docker_hub_options(username: str, password: str, email: str = None, location='docker.io') -> dict:

  if not username:
  raise ValueError('Missing Username')
@@ -436,7 +464,7 @@ class IntegrationUtils:
  auth = IntegrationUtils.encode('{}:{}'.format(username, password))

  return IntegrationUtils.generate_json_key_options(
- location='docker.io',
+ location=location,
  username=username,
  password=password,
  auth=auth,
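The new ACR helper delegates to `generate_docker_hub_options`, which gained a `location` parameter (default `'docker.io'`), so Docker Hub, ACR, and other registries share one JSON-key encoding path; note that the helper's return annotation says `dict` while its docstring describes a base64-encoded string, mirroring the Docker Hub helper it wraps. A usage sketch calling the staticmethod directly, with hypothetical credentials:

    from dtlpy.repositories.integrations import Integrations

    options = Integrations.generate_azure_container_registry_options(
        username='acr-user',                 # hypothetical
        password='acr-token',                # hypothetical
        location='myregistry.azurecr.io',    # ACR server URL
    )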
dtlpy/services/api_client.py CHANGED
@@ -1351,8 +1351,8 @@ class ApiClient:
  else:
  payload = jwt.decode(self.token, algorithms=['HS256'],
  options={'verify_signature': False}, verify=False)
- d = datetime.datetime.utcnow()
- epoch = datetime.datetime(1970, 1, 1)
+ d = datetime.datetime.now(datetime.timezone.utc)
+ epoch = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
  now = (d - epoch).total_seconds()
  exp = payload['exp']
  if now < (exp - t):
dtlpy/services/create_logger.py CHANGED
@@ -106,7 +106,7 @@ class DataloopLogger(logging.handlers.BaseRotatingHandler):
  @staticmethod
  def get_log_filepath():
  log_path = DataloopLogger.get_log_path()
- log_filepath = os.path.join(log_path, '{}.log'.format(datetime.datetime.utcnow().strftime('%Y-%m-%d_%H-%M-%S')))
+ log_filepath = os.path.join(log_path, '{}.log'.format(datetime.datetime.now(datetime.timezone.utc).strftime('%Y-%m-%d_%H-%M-%S')))
  return log_filepath

  def doRollover(self):
dtlpy/services/reporter.py CHANGED
@@ -214,7 +214,7 @@ class Reporter:
  os.mkdir(reports_dir)
  log_filepath = os.path.join(reports_dir,
  "log_{}_{}.json".format(self._resource,
- datetime.datetime.utcnow().strftime("%Y%m%d_%H%M%S")))
+ datetime.datetime.now(datetime.timezone.utc).strftime("%Y%m%d_%H%M%S")))
  errors_json = dict()
  if self.cache_mode == 'diskcache':
  err_cache = self._reports['errors']
dtlpy-1.110.3.data/scripts/dlp.py → dtlpy-1.112.9.data/scripts/dlp.py CHANGED
@@ -27,7 +27,7 @@ logger.propagate = False


  def dlp_exit():
- print(datetime.datetime.utcnow())
+ print(datetime.datetime.now(datetime.timezone.utc))
  print("Goodbye ;)")
  sys.exit(0)

@@ -71,7 +71,7 @@ def main():
  else:
  command_executor.run(args=args)
  except exceptions.TokenExpired:
- print(datetime.datetime.utcnow())
+ print(datetime.datetime.now(datetime.timezone.utc))
  print("[ERROR] token expired, please login.")
  continue
  except SystemExit as e:
@@ -83,11 +83,11 @@ def main():
  sys.exit(0)
  # error
  else:
- print(datetime.datetime.utcnow())
+ print(datetime.datetime.now(datetime.timezone.utc))
  print('"{command}" is not a valid command'.format(command=text))
  continue
  except Exception as e:
- print(datetime.datetime.utcnow())
+ print(datetime.datetime.now(datetime.timezone.utc))
  if hasattr(e, 'message'):
  print(e.message)
  else:
@@ -102,11 +102,11 @@ def main():
  command_executor.run(args=args)
  sys.exit(0)
  except exceptions.TokenExpired:
- print(datetime.datetime.utcnow())
+ print(datetime.datetime.now(datetime.timezone.utc))
  print("[ERROR] token expired, please login.")
  sys.exit(1)
  except Exception as e:
- print(datetime.datetime.utcnow())
+ print(datetime.datetime.now(datetime.timezone.utc))
  print(traceback.format_exc())
  print(e)
  sys.exit(1)
@@ -123,6 +123,6 @@ if __name__ == "__main__":
  try:
  main()
  except Exception as err:
- print(datetime.datetime.utcnow())
+ print(datetime.datetime.now(datetime.timezone.utc))
  print("[ERROR]\t%s" % err)
  print("Dataloop.ai CLI. Type dlp --help for options")
dtlpy-1.110.3.dist-info/METADATA → dtlpy-1.112.9.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dtlpy
- Version: 1.110.3
+ Version: 1.112.9
  Summary: SDK and CLI for Dataloop platform
  Home-page: https://github.com/dataloop-ai/dtlpy
  Author: Dataloop Team
dtlpy-1.110.3.dist-info/RECORD → dtlpy-1.112.9.dist-info/RECORD CHANGED
@@ -1,5 +1,5 @@
- dtlpy/__init__.py,sha256=n86boFpXWXPLjSXZMmWCageGjBumPjWxRv3srysrplk,20338
- dtlpy/__version__.py,sha256=CTldC486s3qtTiQLoG-At-M1MH8bU2fAGxa87T99cJU,20
+ dtlpy/__init__.py,sha256=-5fpi-yAwFdluh8QZ-sWXwNDCD97Q5BCgIs7pUDl04o,20444
+ dtlpy/__version__.py,sha256=XIkrZL_C-sKZYUlGAHoBjRZoQrldqJ58ORqW_bv34Rg,20
  dtlpy/exceptions.py,sha256=EQCKs3pwhwZhgMByQN3D3LpWpdxwcKPEEt-bIaDwURM,2871
  dtlpy/new_instance.py,sha256=tUCzBGaSpm9GTjRuwOkFgo3A8vopUQ-baltdJss3XlI,9964
  dtlpy/assets/__init__.py,sha256=D_hAa6NM8Zoy32sF_9b7m0b7I-BQEyBFg8-9Tg2WOeo,976
@@ -37,15 +37,15 @@ dtlpy/caches/dl_cache.py,sha256=aaqB0THK6eNmQ54SC6egb6z8sJE3ciKQ5cIHrQHe4r8,5695
  dtlpy/caches/filesystem_cache.py,sha256=OrBqyEucSVp7g33c6R1BR3ICbkgQnwYWEDhQ7OxHy2Y,2737
  dtlpy/caches/redis_cache.py,sha256=bgJgxgAXFR_TxPDvlLS4TKumFds-ihNf668JbPYUfpc,2331
  dtlpy/dlp/__init__.py,sha256=QG_BxSqeic0foFBmzIkpZEF4EvxOZamknj2f5Cb6T6Q,868
- dtlpy/dlp/cli_utilities.py,sha256=Kzr-AKbRlXLdGKY2RTUNm0U_vKHxyMOB17TQegeDMdM,16037
+ dtlpy/dlp/cli_utilities.py,sha256=gZA9XIN5GG-xWJ6S1i6T17CDDCypDoev6CY-WHlogYg,16055
  dtlpy/dlp/command_executor.py,sha256=JKtRKTwrKfkXHa1VuFhPw15FuwexBPq_9ANAu2pSyXs,32113
  dtlpy/dlp/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
  dtlpy/dlp/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
- dtlpy/dlp/dlp.py,sha256=YjNBjeCDTXJ7tj8qdiGZ8lFb8DtPZl-FvViyjxt9xF8,4278
+ dtlpy/dlp/dlp.py,sha256=Zv9yoXwNAx4gkED-JiayN-ZkX2dPn4FB0SDx9qc7muo,4404
  dtlpy/dlp/parser.py,sha256=p-TFaiAU2c3QkI97TXzL2LDR3Eq0hGDFrTc9J2jWLh4,30551
  dtlpy/entities/__init__.py,sha256=quty3pkoJ9_8dRyZG2WnPj7Qpr4O05FxXpWsY_txz_M,4982
  dtlpy/entities/analytic.py,sha256=5MpYDKPVsZ1MIy20Ju515RWed6P667j4TLxsan2gyNM,11925
- dtlpy/entities/annotation.py,sha256=DYpVb4bNxwXnxXjh3zUa58qo2nSMaQnejiDwUZntn0E,66610
+ dtlpy/entities/annotation.py,sha256=0bF-N3ApbUaTWa_cIPNHMGxaWGG0q3lQos6fMDX5mCc,66661
  dtlpy/entities/annotation_collection.py,sha256=CEYSBHhhDkC0VJdHsBSrA6TgdKGMcKeI3tFM40UJwS8,29838
  dtlpy/entities/app.py,sha256=vQ7hSMnuRIpoZqZc2NwjGTtWiPTCgi47x_oOgGUB-Pk,6996
  dtlpy/entities/app_module.py,sha256=0UiAbBX1q8iEImi3nY7ySWZZHoRRwu0qUXmyXmgVAc4,3645
@@ -56,7 +56,7 @@ dtlpy/entities/bot.py,sha256=is3NUCnPg56HSjsHIvFcVkymValMqDV0uHRDC1Ib-ds,3819
  dtlpy/entities/codebase.py,sha256=pwRkAq2GV0wvmzshg89IAmE-0I2Wsy_-QNOu8OV8uqc,8999
  dtlpy/entities/collection.py,sha256=FPPPfIxOsBG1ujORPJVq8uXyF8vhIqC6N4EiI9SJzl0,1160
  dtlpy/entities/command.py,sha256=5RMQYjOGLRF8JZd7QFAPyE8utsp4eZzLApI2dEAbaqo,5301
- dtlpy/entities/compute.py,sha256=IFMkHWVu8RyUZSZ24SuE-TnOP-3XzPT5eOEbYFJg76E,14207
+ dtlpy/entities/compute.py,sha256=U974uaXGCfr-TSHqPmyLeJWbwbO-3K082hS1Scapv8M,14497
  dtlpy/entities/dataset.py,sha256=GEvBOly1M8uU--apQZ-G-78DJZzFk178LmMhEANyi0A,53838
  dtlpy/entities/directory_tree.py,sha256=Rni6pLSWytR6yeUPgEdCCRfTg_cqLOdUc9uCqz9KT-Q,1186
  dtlpy/entities/dpk.py,sha256=XrK8X8p4Ag6LMjDrDpMstY-h_yTll_sMmKTZT6bLbWE,17923
@@ -64,9 +64,9 @@ dtlpy/entities/driver.py,sha256=O_QdK1EaLjQyQkmvKsmkNgmvmMb1mPjKnJGxK43KrOA,7197
  dtlpy/entities/execution.py,sha256=uQe535w9OcAoDiNWf96KcpFzUDEUU-DYsUalv5VziyM,13673
  dtlpy/entities/feature.py,sha256=9fFjD0W57anOVSAVU55ypxN_WTCsWTG03Wkc3cAAj78,3732
  dtlpy/entities/feature_set.py,sha256=niw4MkmrDbD_LWQu1X30uE6U4DCzmFhPTaYeZ6VZDB0,4443
- dtlpy/entities/filters.py,sha256=8nz3V5Ui4LemEIfSTp3uAHJAf_ZpwQrMSrfUL6AB7Zk,22719
+ dtlpy/entities/filters.py,sha256=Cdx3BzYa8kIfvW37Gmmwiu4eH4ytfWByu8TQOBvtR2o,22644
  dtlpy/entities/gis_item.py,sha256=Uk-wMBxwcHsImjz4qOjP-EyZAohbRzN43kMpCaVjCXU,3982
- dtlpy/entities/integration.py,sha256=Kdy1j6-cJLW8qNmnqCmdg36phi843YDrlMqcMyMfvYk,5875
+ dtlpy/entities/integration.py,sha256=XraOApW9jbT6EdZraRX2In6sMbfNgEGf2V5Um2RCRqA,6001
  dtlpy/entities/item.py,sha256=WCIPHUmubIe0wva-YMm-LPQdn2S3_-Q151x49C9NEw8,34591
  dtlpy/entities/label.py,sha256=ycDYavIgKhz806plIX-64c07_TeHpDa-V7LnfFVe4Rg,3869
  dtlpy/entities/links.py,sha256=FAmEwHtsrqKet3c0UHH9u_gHgG6_OwF1-rl4xK7guME,2516
@@ -80,7 +80,7 @@ dtlpy/entities/package_defaults.py,sha256=wTD7Z7rGYjVy8AcUxTFEnkOkviiJaLVZYvduiU
  dtlpy/entities/package_function.py,sha256=M42Kvw9A8b6msAkv-wRNAQg_-UC2bejniCjeKDugudc,6314
  dtlpy/entities/package_module.py,sha256=cOkIITATkzzCQpE0sdPiBUisAz8ImlPG2YGZ0K7SypA,5151
  dtlpy/entities/package_slot.py,sha256=XBwCodQe618sQm0bmx46Npo94mEk-zUV7ZX0mDRcsD8,3946
- dtlpy/entities/paged_entities.py,sha256=grNjt2FYg4gKBlVRDkztI1BPOI4JoGeyjvmOW3BnB3k,5927
+ dtlpy/entities/paged_entities.py,sha256=ffw0CbLcOTNDYLQA9gqmjSaTZLRYP_tMnSfa_BmGIyk,8145
  dtlpy/entities/pipeline.py,sha256=JtWGoCUhVszOVkBNK43fbTt446fkND4wH-Y-fN_llww,20851
  dtlpy/entities/pipeline_execution.py,sha256=EQhW4W_G1bIPShYbJSAT--1WNQuvxVQbcQ_MCHIX0KI,9938
  dtlpy/entities/project.py,sha256=ZUx8zA3mr6N145M62R3UDPCCzO1vxfyWO6vjES-bO-g,14653
@@ -145,15 +145,15 @@ dtlpy/miscellaneous/__init__.py,sha256=twbvfsKdiNHNR-vUuy8nUlY3vuUVaSnm-wO83yQde
  dtlpy/miscellaneous/dict_differ.py,sha256=POJbKR0YyWPf5gFADFpIaNFj9gt2aVBTNof7GJNxTCw,3489
  dtlpy/miscellaneous/git_utils.py,sha256=CT_CCDsqDqu_bY3cLcOSU6k3Zr6w40t8GJULLUtAJ_U,7971
  dtlpy/miscellaneous/json_utils.py,sha256=0P4YTlL6o_L7AUrvAeqkqA46MZZK_hDdTrdnmI59y6g,428
- dtlpy/miscellaneous/list_print.py,sha256=leEg3RodgYfH5t_0JG8VuM8NiesR8sJLK_mRSttL5pY,4808
- dtlpy/miscellaneous/zipping.py,sha256=GMdPhAeHQXeMS5ClaiKWMJWVYQLBLAaJUWxvdYrL4Ro,5337
+ dtlpy/miscellaneous/list_print.py,sha256=fBGTMXFUwDG8DD4W6HyR8BTGtbTckLf4W09quNRJm5M,4828
+ dtlpy/miscellaneous/zipping.py,sha256=JplTc8UDFvO8WaD5vKuumVLN0lU_-GtHoE0doWKtmKg,5383
  dtlpy/ml/__init__.py,sha256=vPkyXpc9kcWWZ_PxyPEOsjKBJdEbowLkZr8FZIb_OBM,799
  dtlpy/ml/base_feature_extractor_adapter.py,sha256=iiEGYAx0Rdn4K46H_FlKrAv3ebTXHSxNVAmio0BxhaI,1178
- dtlpy/ml/base_model_adapter.py,sha256=WKpGO5-kLISwXmPy4Dc7e-nIGn3W-tbqJDnhE49WMKA,50930
+ dtlpy/ml/base_model_adapter.py,sha256=E7OktF1WbquvgyZixvPkyq7QW0ID3VF9tevXlwpmnuY,51216
  dtlpy/ml/metrics.py,sha256=BG2E-1Mvjv2e2No9mIJKVmvzqBvLqytKcw3hA7wVUNc,20037
  dtlpy/ml/predictions_utils.py,sha256=He_84U14oS2Ss7T_-Zj5GDiBZwS-GjMPURUh7u7DjF8,12484
  dtlpy/ml/summary_writer.py,sha256=dehDi8zmGC1sAGyy_3cpSWGXoGQSiQd7bL_Thoo8yIs,2784
- dtlpy/ml/train_utils.py,sha256=R-BHKRfqDoLLhFyLzsRFyJ4E-8iedj9s9oZqy3IO2rg,2404
+ dtlpy/ml/train_utils.py,sha256=t607DfyGBRrUQZ9jPmPe4V9Udzfk0hPWuw4OvKZKAeo,2440
  dtlpy/repositories/__init__.py,sha256=D2YI3ZLlSx0OlgVr8y_E9rsj-IxCDOj0MB6QTlv2NSA,2061
  dtlpy/repositories/analytics.py,sha256=dQPCYTPAIuyfVI_ppR49W7_GBj0033feIm9Gd7LW1V0,2966
  dtlpy/repositories/annotations.py,sha256=idTKzanNt-ncB0eIKE5p6WclrVGNjceI2Y7dAzDFtzY,43595
@@ -165,15 +165,15 @@ dtlpy/repositories/codebases.py,sha256=pvcZxdrq0-zWysVbdXjUOhnfcF6hJD8v5VclNZ-zh
  dtlpy/repositories/collections.py,sha256=z-nkR33rq-MzkEff7DDSBlfsI_lkCDFwQZIlMaIT5rM,13514
  dtlpy/repositories/commands.py,sha256=MgXhXxbAzBa2QJM9Z5EsQZRaZ4fGBM17ALoldxi8xYA,5848
  dtlpy/repositories/compositions.py,sha256=H417BvlQAiWr5NH2eANFke6CfEO5o7DSvapYpf7v5Hk,2150
- dtlpy/repositories/computes.py,sha256=vHMvKVX8U-33nB5TR_Nfs9euGr9MbkiwTHuq4yZojM8,14134
+ dtlpy/repositories/computes.py,sha256=V8kVTwXc5lhxrp5e7zxTXvKcVKtg6crCqkL5zQHtKZo,14639
  dtlpy/repositories/datasets.py,sha256=p0HBbTGrxAQ8h9tJsp1jRasPbwnMAtXQ4_sIef9_590,59358
- dtlpy/repositories/downloader.py,sha256=XJC9FhlXgHrA8Ae9bftrbs4YKFCcZoEYJAh6Bt6zGhU,45167
+ dtlpy/repositories/downloader.py,sha256=X5-vspCoTW7_QZuPdaZgOSTvM7jYU0Uf7o5PELZNY9g,45329
  dtlpy/repositories/dpks.py,sha256=dxZpGloZGH6MJG9ZFff5l3GlXw6i-52n9kxL-QiHosQ,18516
- dtlpy/repositories/drivers.py,sha256=fF0UuHCyBzop8pHfryex23mf0kVFAkqzNdOmwBbaWxY,10204
+ dtlpy/repositories/drivers.py,sha256=2fMzzt0ovNeYpfrAOqz4h14C5D7GCLLA5SDj9rQ4UfI,10817
  dtlpy/repositories/executions.py,sha256=4UoU6bnB3kl5cMuF1eJvDecfZCaB06gKWxPfv6_g1_k,32598
  dtlpy/repositories/feature_sets.py,sha256=UowMDAl_CRefRB5oZzubnsjU_OFgiPPdQXn8q2j4Kuw,9666
- dtlpy/repositories/features.py,sha256=A_RqTJxzjTh-Wbm0uXaoTNyHSfCLbeiH38iB11p2ifY,9915
- dtlpy/repositories/integrations.py,sha256=gSgaVp4MkcdrJMnXVr_fl4xrzhfJba8BFbBJTuJPwXc,18159
+ dtlpy/repositories/features.py,sha256=HZR-sLSdwiWdbFsnuZrTDSff0oRK2hwFBQ6UK2yVAvk,9923
+ dtlpy/repositories/integrations.py,sha256=Y5c37fQCaIkw1p5jPEbAqytgRVXuqe771eHC1hNDE7A,19491
  dtlpy/repositories/items.py,sha256=S1OWZ6s8AbVXMiLtCfBBiYPMG8OLqdUhKMHuZWE3bnU,40029
  dtlpy/repositories/messages.py,sha256=QU0Psckg6CA_Tlw9AVxqa-Ay1fRM4n269sSIJkH9o7E,3066
  dtlpy/repositories/models.py,sha256=uYVw319dMgVoXReb9VKl0b3v0_kgetROQaf56cvgwqs,38297
@@ -198,16 +198,16 @@ dtlpy/repositories/uploader.py,sha256=Keu_1fgJPiBpUgBGrAfRErejUK_UvqLTNdwK-BmTPY
  dtlpy/repositories/webhooks.py,sha256=IIpxOJ-7KeQp1TY9aJZz-FuycSjAoYx0TDk8z86KAK8,9033
  dtlpy/services/__init__.py,sha256=VfVJy2otIrDra6i7Sepjyez2ujiE6171ChQZp-YgxsM,904
  dtlpy/services/aihttp_retry.py,sha256=tgntZsAY0dW9v08rkjX1T5BLNDdDd8svtgn7nH8DSGU,5022
- dtlpy/services/api_client.py,sha256=HPG326fG6h0m0_w0JIRhpN-GIXxyeZU5BU3MkN-4kw4,71416
+ dtlpy/services/api_client.py,sha256=G0NL5RvHnjB4ET2z1FmxOm2up4xgPkQgU13NTPbb1VI,71464
  dtlpy/services/api_reference.py,sha256=cW-B3eoi9Xs3AwI87_Kr6GV_E6HPoC73aETFaGz3A-0,1515
  dtlpy/services/async_utils.py,sha256=kaYHTPw0Lg8PeJJq8whPyzrBYkzD7offs5hsKRZXJm8,3960
  dtlpy/services/calls_counter.py,sha256=gr0io5rIsO5-7Cgc8neA1vK8kUtYhgFPmDQ2jXtiZZs,1036
  dtlpy/services/check_sdk.py,sha256=H4KL5xrmNGfR9fUSxTVUeBm_3YFGjkwUZpFnqhFBJyI,2617
  dtlpy/services/cookie.py,sha256=sSZR1QV4ienCcZ8lEK_Y4nZYBgAxO3kHrcBXFKGcmwQ,3694
- dtlpy/services/create_logger.py,sha256=WFQjuvCuwrZoftFaU9jQkmEcOrL1XD-NqsuBqb5_SN4,6332
+ dtlpy/services/create_logger.py,sha256=2dC39CCmc17H4LYRpY0bRIT4S50UTGNOBPYIqJnrsIU,6350
  dtlpy/services/events.py,sha256=mpcu8RusLPrBcJEbWR61uFb4FiU_dQv3xoa7uM-rTcY,3686
  dtlpy/services/logins.py,sha256=YMMi_C_A97ZNtIlREE30hpBRhULAZJtORiVL6OL0oPQ,8766
- dtlpy/services/reporter.py,sha256=4zi9-bshKAPHG2XMOXS39cFZ0mhqNc3Qa9uaMN7CSZ8,9122
+ dtlpy/services/reporter.py,sha256=i-hlvX6_olRTC4PbwyOq2kVjWghuUpyNUETVI-KSVpw,9140
  dtlpy/services/service_defaults.py,sha256=a7KoqkVmn2TXmM9gN9JRaVVtcG2b8JGIieVnaZeEaao,3860
  dtlpy/utilities/__init__.py,sha256=ncQD1O5lZ7L9n9rNRBivyqNVFDZyQcmqn-X-wyQhhIs,898
  dtlpy/utilities/base_package_runner.py,sha256=tux_XCiCoOhMPtFaQludzhj0ny6OTKhyoN1aXjPal54,8522
@@ -226,19 +226,19 @@ dtlpy/utilities/reports/report.py,sha256=3nEsNnIWmdPEsd21nN8vMMgaZVcPKn9iawKTTeO
  dtlpy/utilities/videos/__init__.py,sha256=SV3w51vfPuGBxaMeNemx6qEMHw_C4lLpWNGXMvdsKSY,734
  dtlpy/utilities/videos/video_player.py,sha256=LCxg0EZ_DeuwcT7U_r7MRC6Q19s0xdFb7x5Gk39PRms,24072
  dtlpy/utilities/videos/videos.py,sha256=Dj916B4TQRIhI7HZVevl3foFrCsPp0eeWwvGbgX3-_A,21875
- dtlpy-1.110.3.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
- dtlpy-1.110.3.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
- dtlpy-1.110.3.data/scripts/dlp.py,sha256=tEokRaDINISXnq8yNx_CBw1qM5uwjYiZoJOYGqWB3RU,4267
+ dtlpy-1.112.9.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
+ dtlpy-1.112.9.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
+ dtlpy-1.112.9.data/scripts/dlp.py,sha256=ZpfJvYE1_OTSorEYBphqTOutnHSb5TqOXh0y_mUCTJs,4393
  tests/assets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tests/assets/models_flow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  tests/assets/models_flow/failedmain.py,sha256=n8F4eu_u7JPrJ1zedbJPvv9e3lHb3ihoErqrBIcseEc,1847
  tests/assets/models_flow/main.py,sha256=vnDKyVZaae2RFpvwS22Hzi6Dt2LJerH4yQrmKtaT8_g,2123
  tests/assets/models_flow/main_model.py,sha256=Hl_tv7Q6KaRL3yLkpUoLMRqu5-ab1QsUYPL6RPEoamw,2042
  tests/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- tests/features/environment.py,sha256=JcM956BxLBRvDqy6Kr1Nxd1FY_gxbE6XztZBVBMCGYM,18897
- dtlpy-1.110.3.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
- dtlpy-1.110.3.dist-info/METADATA,sha256=fDBFNra1w6f_hs3IdqJy7T-ZfvcfFXvG72-7cjMNdK8,5469
- dtlpy-1.110.3.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
- dtlpy-1.110.3.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
- dtlpy-1.110.3.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
- dtlpy-1.110.3.dist-info/RECORD,,
+ tests/features/environment.py,sha256=ZZNSN8TObnNMkX0IQhSolAs_9I_V9hHFL_IZjG0jrGU,18909
+ dtlpy-1.112.9.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+ dtlpy-1.112.9.dist-info/METADATA,sha256=2C1bQeEFV6GqQlo05c8mbX3WL-lWjZbydg5CkrP739I,5469
+ dtlpy-1.112.9.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+ dtlpy-1.112.9.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
+ dtlpy-1.112.9.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
+ dtlpy-1.112.9.dist-info/RECORD,,
tests/features/environment.py CHANGED
@@ -294,10 +294,10 @@ def after_tag(context, tag):
  pass
  elif tag == 'wip':
  pass
- elif any(i_tag in tag for i_tag in ['DAT-', 'qa-', 'rc_only', 'skip_test', 'ATP', 'AIRGAPPED']):
+ elif any(i_tag in tag for i_tag in ['DAT-', 'qa-', 'rc_only', 'skip_test', 'ATP', 'AIRGAPPED', 'DM-cache']):
  pass
  else:
- raise ValueError('unknown tag: {}'.format(tag))
+ raise ValueError('Unknown tag: {}'.format(tag))


  @fixture