dtlpy 1.89.29__py3-none-any.whl → 1.90.39__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. dtlpy/__init__.py +1 -0
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/assets/lock_open.png +0 -0
  4. dtlpy/dlp/command_executor.py +15 -0
  5. dtlpy/entities/app.py +29 -1
  6. dtlpy/entities/base_entity.py +0 -1
  7. dtlpy/entities/dataset.py +7 -1
  8. dtlpy/entities/dpk.py +16 -7
  9. dtlpy/entities/filters.py +6 -0
  10. dtlpy/entities/model.py +7 -2
  11. dtlpy/entities/package_function.py +1 -0
  12. dtlpy/entities/package_module.py +1 -0
  13. dtlpy/ml/base_feature_extractor_adapter.py +28 -0
  14. dtlpy/ml/base_model_adapter.py +19 -9
  15. dtlpy/repositories/__init__.py +1 -0
  16. dtlpy/repositories/apps.py +68 -1
  17. dtlpy/repositories/features.py +8 -2
  18. dtlpy/repositories/models.py +34 -6
  19. dtlpy/repositories/recipes.py +3 -2
  20. dtlpy/repositories/schema.py +120 -0
  21. dtlpy/repositories/services.py +4 -1
  22. dtlpy/services/api_client.py +12 -3
  23. dtlpy/services/logins.py +49 -18
  24. dtlpy/utilities/dataset_generators/dataset_generator.py +2 -1
  25. {dtlpy-1.89.29.dist-info → dtlpy-1.90.39.dist-info}/METADATA +2 -2
  26. {dtlpy-1.89.29.dist-info → dtlpy-1.90.39.dist-info}/RECORD +38 -31
  27. tests/assets/__init__.py +0 -0
  28. tests/assets/models_flow/__init__.py +0 -0
  29. tests/assets/models_flow/failedmain.py +52 -0
  30. tests/assets/models_flow/main.py +51 -0
  31. tests/assets/models_flow/main_model.py +54 -0
  32. {dtlpy-1.89.29.data → dtlpy-1.90.39.data}/scripts/dlp +0 -0
  33. {dtlpy-1.89.29.data → dtlpy-1.90.39.data}/scripts/dlp.bat +0 -0
  34. {dtlpy-1.89.29.data → dtlpy-1.90.39.data}/scripts/dlp.py +0 -0
  35. {dtlpy-1.89.29.dist-info → dtlpy-1.90.39.dist-info}/LICENSE +0 -0
  36. {dtlpy-1.89.29.dist-info → dtlpy-1.90.39.dist-info}/WHEEL +0 -0
  37. {dtlpy-1.89.29.dist-info → dtlpy-1.90.39.dist-info}/entry_points.txt +0 -0
  38. {dtlpy-1.89.29.dist-info → dtlpy-1.90.39.dist-info}/top_level.txt +0 -0
dtlpy/__init__.py CHANGED
@@ -177,6 +177,7 @@ login = client_api.login
177
177
  logout = client_api.logout
178
178
  login_token = client_api.login_token
179
179
  login_secret = client_api.login_secret
180
+ login_api_key = client_api.login_api_key
180
181
  login_m2m = client_api.login_m2m
181
182
  add_environment = client_api.add_environment
182
183
  setenv = client_api.setenv
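The new top-level alias exposes API-key login directly on the dl namespace. A minimal usage sketch (the key and project name below are placeholders, not part of this diff):

    import dtlpy as dl

    # Assumption: API_KEY holds a valid Dataloop API key; the SDK simply stores it
    # as the client token (see ApiClient.login_api_key later in this diff).
    API_KEY = '<your-api-key>'
    dl.login_api_key(api_key=API_KEY)

    # subsequent SDK calls authenticate with that key
    project = dl.projects.get(project_name='<project-name>')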
dtlpy/__version__.py CHANGED
@@ -1 +1 @@
1
- version = '1.89.29'
1
+ version = '1.90.39'
dtlpy/assets/lock_open.png CHANGED
Binary file
dtlpy/dlp/command_executor.py CHANGED
@@ -59,6 +59,9 @@ class CommandExecutor:
59
59
  self.dl.login_token(args.token)
60
60
  self.dl.info(with_token=False)
61
61
 
62
+ def login_api_key(self, args):
63
+ self.dl.login_api_key(api_key=args.api_key)
64
+
62
65
  def login_secret(self, args):
63
66
  self.login_m2m(args=args)
64
67
 
@@ -587,6 +590,18 @@ class CommandExecutor:
587
590
  elif args.app == 'update':
588
591
  # TODO: I think it changed, not implemented
589
592
  logger.info('App updated successfully')
593
+ elif args.app == 'resume':
594
+ succeed = self.utils.get_apps_repo(args).resume(app_id=args.app_id)
595
+ if succeed is True:
596
+ logger.info('Resumed application successfully')
597
+ else:
598
+ logger.info('Application resume failed')
599
+ elif args.app == 'pause':
600
+ succeed = self.utils.get_apps_repo(args).pause(app_id=args.app_id)
601
+ if succeed is True:
602
+ logger.info('Paused application successfully')
603
+ else:
604
+ logger.info('Application pause failed')
590
605
  elif args.app == 'install':
591
606
  app = self.utils.get_apps_repo(args).install(
592
607
  dpk=self.utils.get_dpks_repo(args).get(dpk_id=args.dpk_id),
dtlpy/entities/app.py CHANGED
@@ -46,6 +46,7 @@ class App(entities.BaseEntity):
46
46
  routes = attr.ib(type=dict)
47
47
  custom_installation = attr.ib(type=dict)
48
48
  metadata = attr.ib(type=dict)
49
+ status = attr.ib(type=entities.CompositionStatus)
49
50
 
50
51
  # sdk
51
52
  _project = attr.ib(type=entities.Project, repr=False)
@@ -98,6 +99,30 @@ class App(entities.BaseEntity):
98
99
  """
99
100
  return self.apps.update(self)
100
101
 
102
+ def resume(self):
103
+ """
104
+ Resume the current app
105
+
106
+ :return bool whether the operation ran successfully or not
107
+
108
+ **Example**
109
+ .. code-block:: python
110
+ succeed = app.resume()
111
+ """
112
+ return self.apps.resume(self)
113
+
114
+ def pause(self):
115
+ """
116
+ Pause the current app
117
+
118
+ :return bool whether the operation ran successfully or not
119
+
120
+ **Example**
121
+ .. code-block:: python
122
+ succeed = app.pause()
123
+ """
124
+ return self.apps.pause(self)
125
+
101
126
  @staticmethod
102
127
  def _protected_from_json(_json, client_api, project, is_fetched=True):
103
128
  """
@@ -150,6 +175,8 @@ class App(entities.BaseEntity):
150
175
  _json['customInstallation'] = self.custom_installation
151
176
  if self.metadata is not None:
152
177
  _json['metadata'] = self.metadata
178
+ if self.status is not None:
179
+ _json['status'] = self.status
153
180
 
154
181
  return _json
155
182
 
@@ -172,7 +199,8 @@ class App(entities.BaseEntity):
172
199
  custom_installation=_json.get('customInstallation', {}),
173
200
  client_api=client_api,
174
201
  project=project,
175
- metadata=_json.get('metadata', None)
202
+ metadata=_json.get('metadata', None),
203
+ status=_json.get('status', None)
176
204
  )
177
205
  app.is_fetched = is_fetched
178
206
  return app
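The entity-level pause() and resume() added above delegate to the Apps repository. A short sketch (identifiers are placeholders):

    import dtlpy as dl

    project = dl.projects.get(project_name='<project-name>')
    app = project.apps.get(app_name='<app-name>')

    # deactivate the installed app, then bring it back up; both return True on success
    if app.pause():
        app.resume()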
dtlpy/entities/base_entity.py CHANGED
@@ -1,4 +1,3 @@
1
- import typing_extensions
2
1
  import functools
3
2
  import logging
4
3
  import enum
dtlpy/entities/dataset.py CHANGED
@@ -281,7 +281,7 @@ class Dataset(entities.BaseEntity):
281
281
  def set_repositories(self):
282
282
  reps = namedtuple('repositories',
283
283
  field_names=['items', 'recipes', 'datasets', 'assignments', 'tasks', 'annotations',
284
- 'ontologies', 'features', 'settings'])
284
+ 'ontologies', 'features', 'settings', 'schema'])
285
285
  if self._project is None:
286
286
  datasets = repositories.Datasets(client_api=self._client_api, project=self._project)
287
287
  features = repositories.Features(client_api=self._client_api, project=self._project)
@@ -299,6 +299,7 @@ class Dataset(entities.BaseEntity):
299
299
  ontologies=repositories.Ontologies(client_api=self._client_api, dataset=self),
300
300
  features=features,
301
301
  settings=repositories.Settings(client_api=self._client_api, dataset=self),
302
+ schema=repositories.Schema(client_api=self._client_api, dataset=self)
302
303
  )
303
304
 
304
305
  @property
@@ -346,6 +347,11 @@ class Dataset(entities.BaseEntity):
346
347
  assert isinstance(self._repositories.features, repositories.Features)
347
348
  return self._repositories.features
348
349
 
350
+ @property
351
+ def schema(self):
352
+ assert isinstance(self._repositories.schema, repositories.Schema)
353
+ return self._repositories.schema
354
+
349
355
  @property
350
356
  def project(self):
351
357
  if self._project is None:
dtlpy/entities/dpk.py CHANGED
@@ -104,6 +104,7 @@ class PipelineNode(entities.DlEntity):
104
104
  description: str = entities.DlProperty(location=['description'], _type=str)
105
105
  configuration: dict = entities.DlProperty(location=['configuration'], _type=dict)
106
106
  scope: CustomNodeScope = entities.DlProperty(location=['scope'], _type=CustomNodeScope)
107
+ compute_config: str = entities.DlProperty(location=['computeConfig'], _type=str, default=None)
107
108
 
108
109
  def to_json(self) -> dict:
109
110
  return self._dict.copy()
@@ -238,6 +239,7 @@ class Dpk(entities.DlEntity):
238
239
  scope: dict = entities.DlProperty(location=['scope'], _type=str)
239
240
  context: dict = entities.DlProperty(location=['context'], _type=dict)
240
241
  metadata: dict = entities.DlProperty(location=['metadata'], _type=dict)
242
+ dependencies: dict = entities.DlProperty(location=['dependencies'], _type=List[dict])
241
243
 
242
244
  # defaults
243
245
  components: Components = entities.DlProperty(location=['components'], _kls='Components')
@@ -264,12 +266,12 @@ class Dpk(entities.DlEntity):
264
266
  reps = namedtuple('repositories',
265
267
  field_names=['dpks', 'codebases', 'organizations', 'services'])
266
268
 
267
- self.__repositories = reps(dpks=repositories.Dpks(client_api=self.client_api, project=self.project),
268
- codebases=repositories.Codebases(client_api=self.client_api),
269
- organizations=repositories.Organizations(client_api=self.client_api),
270
- services=repositories.Services(client_api=self.client_api, project=self.project,
271
- package=self),
272
- )
269
+ self.__repositories = reps(
270
+ dpks=repositories.Dpks(client_api=self.client_api, project=self.project),
271
+ codebases=repositories.Codebases(client_api=self.client_api),
272
+ organizations=repositories.Organizations(client_api=self.client_api),
273
+ services=repositories.Services(client_api=self.client_api, project=self.project, package=self),
274
+ )
273
275
 
274
276
  return self.__repositories
275
277
 
@@ -288,6 +290,11 @@ class Dpk(entities.DlEntity):
288
290
  assert isinstance(self._repositories.dpks, repositories.Dpks)
289
291
  return self._repositories.dpks
290
292
 
293
+ @property
294
+ def services(self):
295
+ assert isinstance(self._repositories.services, repositories.Services)
296
+ return self._repositories.services
297
+
291
298
  ###########
292
299
  # methods #
293
300
  ###########
@@ -424,9 +431,11 @@ class Dpk(entities.DlEntity):
424
431
  :return: App entity
425
432
  :rtype: dtlpy.entities.App
426
433
  """
427
- return cls(
434
+ res = cls(
428
435
  _dict=_json,
429
436
  client_api=client_api,
430
437
  project=project,
431
438
  is_fetched=is_fetched
432
439
  )
440
+
441
+ return res
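Among the Dpk changes, the new services property scopes the Services repository to the DPK. A hedged sketch (names are placeholders; it assumes the DPK is installed and that listing through this scoped repository behaves like the regular Services.list):

    import dtlpy as dl

    project = dl.projects.get(project_name='<project-name>')
    dpk = project.dpks.get(dpk_name='<dpk-name>')

    # iterate the services that belong to this DPK's app
    for service in dpk.services.list().all():
        print(service.name, service.id)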
dtlpy/entities/filters.py CHANGED
@@ -456,6 +456,9 @@ class Filters:
456
456
  _json['systemSpace'] = self._system_space
457
457
  return _json
458
458
 
459
+ def print(self, indent=2):
460
+ print(json.dumps(self.prepare(), indent=indent))
461
+
459
462
  def sort_by(self, field, value: FiltersOrderByDirection = FiltersOrderByDirection.ASCENDING):
460
463
  """
461
464
  sort the filter
@@ -549,3 +552,6 @@ class SingleFilter:
549
552
  _json[self.field] = value
550
553
 
551
554
  return _json
555
+
556
+ def print(self, indent=2):
557
+ print(json.dumps(self.prepare(), indent=indent))
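Filters.print() (and the SingleFilter counterpart) just dumps the prepared DQL payload, which is handy for inspecting a query before sending it:

    import dtlpy as dl

    filters = dl.Filters()
    filters.add(field='dir', values='/images')
    filters.add(field='metadata.system.mimetype', values='image/*')

    # pretty-print the DQL that prepare() would send to the platform
    filters.print(indent=4)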
dtlpy/entities/model.py CHANGED
@@ -118,6 +118,7 @@ class Model(entities.BaseEntity):
118
118
  _repositories = attr.ib(repr=False)
119
119
  _ontology = attr.ib(repr=False, default=None)
120
120
  updated_by = attr.ib(default=None)
121
+ app = attr.ib(default=None)
121
122
 
122
123
  @staticmethod
123
124
  def _protected_from_json(_json, client_api, project, package, is_fetched=True):
@@ -198,7 +199,8 @@ class Model(entities.BaseEntity):
198
199
  input_type=_json.get('inputType', None),
199
200
  output_type=_json.get('outputType', None),
200
201
  module_name=_json.get('moduleName', None),
201
- updated_by=_json.get('updatedBy', None)
202
+ updated_by=_json.get('updatedBy', None),
203
+ app=_json.get('app', None)
202
204
  )
203
205
  inst.is_fetched = is_fetched
204
206
  return inst
@@ -226,7 +228,8 @@ class Model(entities.BaseEntity):
226
228
  attr.fields(Model).updated_at,
227
229
  attr.fields(Model).input_type,
228
230
  attr.fields(Model).output_type,
229
- attr.fields(Model).updated_by
231
+ attr.fields(Model).updated_by,
232
+ attr.fields(Model).app
230
233
  ))
231
234
  _json['packageId'] = self.package_id
232
235
  _json['datasetId'] = self.dataset_id
@@ -248,6 +251,8 @@ class Model(entities.BaseEntity):
248
251
 
249
252
  if self.updated_by:
250
253
  _json['updatedBy'] = self.updated_by
254
+ if self.app:
255
+ _json['app'] = self.app
251
256
 
252
257
  return _json
253
258
 
dtlpy/entities/package_function.py CHANGED
@@ -64,6 +64,7 @@ class PackageFunction(entities.DlEntity):
64
64
  _kls='FunctionIO')
65
65
  inputs: typing.Union[typing.List['entities.FunctionIO'], None] = entities.DlProperty(location=['input'],
66
66
  _kls='FunctionIO')
67
+ compute_config: str = entities.DlProperty(location=['computeConfig'], _type=str, default=None)
67
68
 
68
69
  def __repr__(self):
69
70
  # TODO need to move to DlEntity
dtlpy/entities/package_module.py CHANGED
@@ -28,6 +28,7 @@ class PackageModule(entities.DlEntity):
28
28
  _type=list,
29
29
  default=list(),
30
30
  _kls='PackageFunction')
31
+ compute_config: str = entities.DlProperty(location=['computeConfig'], _type=str, default=None)
31
32
 
32
33
  def __repr__(self):
33
34
  # TODO need to move to DlEntity
dtlpy/ml/base_feature_extractor_adapter.py ADDED
@@ -0,0 +1,28 @@
1
+ from abc import ABC
2
+
3
+ from base_model_adapter import BaseModelAdapter
4
+ from .. import entities
5
+
6
+
7
+ class BaseFeatureExtractorAdapter(BaseModelAdapter, ABC):
8
+ def __int__(self, model_entity: entities.Model = None):
9
+ super().__init__(model_entity)
10
+
11
+ def extract_features(self, batch: list, **kwargs):
12
+ """ Runs inference with the model, but does not predict. Instead, extracts features for the input batch.
13
+
14
+ Virtual method - need to implement
15
+
16
+ :param batch: `list` a list containing a batch of items whose features will be extracted
17
+ """
18
+ raise NotImplementedError("Please implement 'extract_features' method in {}".format(self.__class__.__name__))
19
+
20
+ def extract_dataset_features(self, dataset: entities.Dataset, **kwargs):
21
+ """ Runs inference to extract features for all items in a dataset.
22
+
23
+ Virtual method - need to implement
24
+
25
+ :param dataset: `entities.Dataset` dataset entity whose items will have their features extracted
26
+ """
27
+ raise NotImplementedError("Please implement 'extract_dataset_features' method in "
28
+ "{}".format(self.__class__.__name__))
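A sketch of a concrete feature extractor following the new interface. It subclasses dl.BaseModelAdapter (the base of the new class) instead of importing the new module directly, since base_feature_extractor_adapter is not re-exported from the dl namespace in this release; the class name and embedding size are illustrative only:

    import dtlpy as dl


    class DummyFeatureExtractor(dl.BaseModelAdapter):
        """Illustrative adapter: returns a fixed-size embedding per input item."""

        def load(self, local_path, **kwargs):
            # nothing to load for this dummy extractor
            pass

        def extract_features(self, batch, **kwargs):
            # one fake 8-dimensional embedding per item in the batch
            return [[0.0] * 8 for _ in batch]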
dtlpy/ml/base_model_adapter.py CHANGED
@@ -157,6 +157,8 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
157
157
  """
158
158
  import dtlpymetrics
159
159
  compare_types = model.output_type
160
+ if not filters:
161
+ filters = entities.Filters()
160
162
  if filters is not None and isinstance(filters, dict):
161
163
  filters = entities.Filters(custom_filter=filters)
162
164
  model = dtlpymetrics.scoring.create_model_score(model=model,
@@ -408,10 +410,13 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
408
410
  self.logger.debug("Predicting dataset (name:{}, id:{}, using batch size {}".format(dataset.name,
409
411
  dataset.id,
410
412
  batch_size))
413
+ if not filters:
414
+ filters = entities.Filters()
411
415
  if filters is not None and isinstance(filters, dict):
412
416
  filters = entities.Filters(custom_filter=filters)
413
417
  pages = dataset.items.list(filters=filters, page_size=batch_size)
414
- self.predict_items(items=list(pages.all()),
418
+ items = [item for item in pages.all() if item.type == 'file']
419
+ self.predict_items(items=items,
415
420
  with_upload=with_upload,
416
421
  cleanup=cleanup,
417
422
  batch_size=batch_size,
@@ -539,6 +544,8 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
539
544
  # Predicting #
540
545
  ##############
541
546
  logger.info(f"Calling prediction, dataset: {dataset.name!r} ({model.id!r}), filters: {filters}")
547
+ if not filters:
548
+ filters = entities.Filters()
542
549
  self.predict_dataset(dataset=dataset,
543
550
  filters=filters,
544
551
  with_upload=True)
@@ -630,18 +637,21 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
630
637
  """
631
638
  for prediction in predictions:
632
639
  if prediction.type == entities.AnnotationType.SEGMENTATION:
640
+ color = None
633
641
  try:
634
642
  color = item.dataset._get_ontology().color_map.get(prediction.label, None)
635
643
  except (exceptions.BadRequest, exceptions.NotFound):
636
- color = None
637
- logger.warning("Can't get annotation color from item's dataset, using model's dataset.")
644
+ ...
638
645
  if color is None:
639
- try:
640
- color = self.model_entity.dataset._get_ontology().color_map.get(prediction.label,
641
- (255, 255, 255))
642
- except (exceptions.BadRequest, exceptions.NotFound):
643
- logger.warning("Can't get annotation color from model's dataset, using default.")
644
- color = prediction.color
646
+ if self.model_entity._dataset is not None:
647
+ try:
648
+ color = self.model_entity.dataset._get_ontology().color_map.get(prediction.label,
649
+ (255, 255, 255))
650
+ except (exceptions.BadRequest, exceptions.NotFound):
651
+ ...
652
+ if color is None:
653
+ logger.warning("Can't get annotation color from model's dataset, using default.")
654
+ color = prediction.color
645
655
  prediction.color = color
646
656
 
647
657
  prediction.item_id = item.id
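With these changes predict_dataset no longer requires an explicit filter: a default dl.Filters() is built and directory items are skipped. A hedged call sketch (ids are placeholders and MyAdapter is a hypothetical minimal subclass; a real adapter implements load/predict properly):

    import dtlpy as dl


    class MyAdapter(dl.BaseModelAdapter):       # hypothetical minimal adapter
        def load(self, local_path, **kwargs):
            pass

        def predict(self, batch, **kwargs):
            # return an empty annotation collection per item
            return [dl.AnnotationCollection() for _ in batch]


    model = dl.models.get(model_id='<model-id>')            # placeholder id
    dataset = dl.datasets.get(dataset_id='<dataset-id>')    # placeholder id
    adapter = MyAdapter(model_entity=model)

    # filters omitted: a default dl.Filters() is created and only items
    # whose type is 'file' (not directories) reach predict_items
    adapter.predict_dataset(dataset=dataset, with_upload=True)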
dtlpy/repositories/__init__.py CHANGED
@@ -50,3 +50,4 @@ from .apps import Apps
50
50
  from .dpks import Dpks
51
51
  from .messages import Messages
52
52
  from .compositions import Compositions
53
+ from .schema import Schema
dtlpy/repositories/apps.py CHANGED
@@ -253,7 +253,8 @@ class Apps:
253
253
  error='400',
254
254
  message='You must provide an identifier in inputs')
255
255
  if app_name is not None:
256
- app_id = self.__get_by_name(app_name)
256
+ app = self.__get_by_name(app_name)
257
+ app_id = app.id
257
258
 
258
259
  success, response = self._client_api.gen_request(req_type='delete', path='/apps/{}'.format(app_id))
259
260
  if not success:
@@ -261,3 +262,69 @@ class Apps:
261
262
 
262
263
  logger.debug(f"App deleted successfully (id: {app_id}, name: {app_name}")
263
264
  return success
265
+
266
+ def resume(self, app: entities.App = None, app_id: str = None) -> bool:
267
+ """
268
+ Activate an app entity.
269
+
270
+ Note: You are required to add either app or app_id.
271
+
272
+ :param entities.App app: the app entity
273
+ :param str app_id: optional - the id of the app.
274
+ :return whether we succeed activating the specified app.
275
+ :rtype bool
276
+
277
+ **Example**
278
+ .. code-block:: python
279
+ # succeed = dl.apps.resume(app)
280
+ """
281
+ if app_id is not None and app is None:
282
+ app = self.get(app_id=app_id)
283
+ if app is None:
284
+ raise exceptions.PlatformException(error='400', message='You must provide app or app_id')
285
+
286
+ if app and app.status == entities.CompositionStatus.INSTALLED:
287
+ raise exceptions.PlatformException(
288
+ error='400',
289
+ message='Application is already active'
290
+ )
291
+
292
+ success, response = self._client_api.gen_request(req_type='post', path='/apps/{}/activate'.format(app.id))
293
+ if not success:
294
+ raise exceptions.PlatformException(response)
295
+
296
+ logger.debug(f"App resumed successfully (id: {app.id}, name: {app.name}")
297
+ return success
298
+
299
+ def pause(self, app: entities.App = None, app_id: str = None) -> bool:
300
+ """
301
+ Pausing an app entity.
302
+
303
+ Note: You are required to add either app or app_id.
304
+
305
+ :param entities.App app: the app entity
306
+ :param str app_id: optional - the id of the app.
307
+ :return whether we succeed pausing the specified app.
308
+ :rtype bool
309
+
310
+ **Example**
311
+ .. code-block:: python
312
+ # succeed = dl.apps.pause(app)
313
+ """
314
+ if app_id is not None and app is None:
315
+ app = self.get(app_id=app_id)
316
+ if app is None:
317
+ raise exceptions.PlatformException(error='400', message='You must provide app or app_id')
318
+
319
+ if app and app.status == entities.CompositionStatus.UNINSTALLED:
320
+ raise exceptions.PlatformException(
321
+ error='400',
322
+ message='Application is already inactive'
323
+ )
324
+
325
+ success, response = self._client_api.gen_request(req_type='post', path='/apps/{}/deactivate'.format(app.id))
326
+ if not success:
327
+ raise exceptions.PlatformException(response)
328
+
329
+ logger.debug(f"App paused successfully (id: {app.id}, name: {app.name}")
330
+ return success
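Repository-level counterpart of the entity methods shown earlier, addressing the app by id (a placeholder below); pause raises if the app is already uninstalled and resume raises if it is already installed:

    import dtlpy as dl

    project = dl.projects.get(project_name='<project-name>')

    # deactivate and reactivate by id; both return True on success
    project.apps.pause(app_id='<app-id>')
    project.apps.resume(app_id='<app-id>')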
dtlpy/repositories/features.py CHANGED
@@ -18,6 +18,8 @@ class Features:
18
18
  item: entities.Item = None,
19
19
  annotation: entities.Annotation = None,
20
20
  feature_set: entities.FeatureSet = None):
21
+ if project is not None and project_id is None:
22
+ project_id = project.id
21
23
  self._project = project
22
24
  self._project_id = project_id
23
25
  self._item = item
@@ -34,6 +36,9 @@ class Features:
34
36
 
35
37
  @property
36
38
  def project(self) -> entities.Project:
39
+ if self._project is None and self._project_id is None and self._item is not None:
40
+ self._project = self._item.project
41
+ self._project_id = self._project.id
37
42
  if self._project is None and self._project_id is not None:
38
43
  # get from id
39
44
  self._project = repositories.Projects(client_api=self._client_api).get(project_id=self._project_id)
@@ -96,8 +101,9 @@ class Features:
96
101
  filters.add(field='featureSetId', values=self._feature_set.id)
97
102
  if self._item is not None:
98
103
  filters.add(field='entityId', values=self._item.id)
99
- if self._project_id is not None:
100
- filters.context = {"projects": [self._project_id]}
104
+ if self._project_id is None:
105
+ self._project_id = self.project.id
106
+ filters.context = {"projects": [self._project_id]}
101
107
 
102
108
  paged = entities.PagedEntities(items_repository=self,
103
109
  filters=filters,
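After this change the Features repository can resolve its project from the item it was created with, so listing an item's feature vectors should no longer require passing a project or project_id explicitly. A sketch (the item id is a placeholder, and it assumes the item entity exposes the repository as item.features):

    import dtlpy as dl

    item = dl.items.get(item_id='<item-id>')   # placeholder id

    # the project context is derived from the item itself
    for feature in item.features.list().all():
        print(feature.feature_set_id, feature.value)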
dtlpy/repositories/models.py CHANGED
@@ -199,7 +199,7 @@ class Models:
199
199
  def _set_model_filter(self,
200
200
  metadata: dict,
201
201
  train_filter: entities.Filters = None,
202
- validation_filter: entities.Filters = None, ):
202
+ validation_filter: entities.Filters = None):
203
203
  if metadata is None:
204
204
  metadata = {}
205
205
  if 'system' not in metadata:
@@ -285,6 +285,7 @@ class Models:
285
285
  output_type=None,
286
286
  train_filter: entities.Filters = None,
287
287
  validation_filter: entities.Filters = None,
288
+ app: entities.App = None
288
289
  ) -> entities.Model:
289
290
  """
290
291
  Create a Model entity
@@ -306,6 +307,7 @@ class Models:
306
307
  :param str output_type: dl.AnnotationType - the type of annotations the model produces (class, box segment, text, etc)
307
308
  :param dtlpy.entities.filters.Filters train_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model train
308
309
  :param dtlpy.entities.filters.Filters validation_filter: Filters entity or a dictionary to define the items' scope in the specified dataset_id for the model validation
310
+ :param dtlpy.entities.App app: App entity to connect the model to
309
311
  :return: Model Entity
310
312
 
311
313
  **Example**:
@@ -342,10 +344,12 @@ class Models:
342
344
  raise exceptions.PlatformException('Please provide project_id')
343
345
  project_id = self._project.id
344
346
  else:
345
- if project_id != self._project_id and not package.is_global:
346
- logger.warning(
347
- "Note! you are specified project_id {!r} which is different from repository context: {!r}".format(
348
- project_id, self._project_id))
347
+ if project_id != self._project_id:
348
+ if (isinstance(package, entities.Package) and not package.is_global) or \
349
+ (isinstance(package, entities.Dpk) and not package.scope != 'public'):
350
+ logger.warning(
351
+ "Note! you are specified project_id {!r} which is different from repository context: {!r}".format(
352
+ project_id, self._project_id))
349
353
 
350
354
  if model_artifacts is None:
351
355
  model_artifacts = []
@@ -367,6 +371,29 @@ class Models:
367
371
  'outputType': output_type,
368
372
  }
369
373
 
374
+ if app is not None:
375
+ if not isinstance(package, entities.Dpk):
376
+ raise ValueError('package must be a Dpk entity')
377
+ if app.dpk_name != package.name or app.dpk_version != package.version:
378
+ raise ValueError('App and package must be the same')
379
+ component_name = None
380
+ compute_config = None
381
+ for model in package.components.models:
382
+ if model['name'] == model_name:
383
+ component_name = model['name']
384
+ compute_config = model.get('computeConfigs', None)
385
+ break
386
+ if component_name is None:
387
+ raise ValueError('Model name not found in package')
388
+ payload['app'] = {
389
+ "id": app.id,
390
+ "componentName": component_name,
391
+ "dpkName": package.name,
392
+ "dpkVersion": package.version
393
+ }
394
+ if compute_config is not None:
395
+ payload['app']['computeConfig'] = compute_config
396
+
370
397
  if configuration is not None:
371
398
  payload['configuration'] = configuration
372
399
 
@@ -380,7 +407,8 @@ class Models:
380
407
  payload['status'] = status
381
408
 
382
409
  if train_filter or validation_filter:
383
- metadata = self._set_model_filter(metadata={}, train_filter=train_filter,
410
+ metadata = self._set_model_filter(metadata={},
411
+ train_filter=train_filter,
384
412
  validation_filter=validation_filter)
385
413
  payload['metadata'] = metadata
386
414
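The new app argument ties a model to an installed app: the app must come from the same DPK (name and version are validated) and model_name must match a model component declared in that DPK. A hedged creation sketch (all identifiers are placeholders, and it assumes the DPK is passed through the repository's package argument, which is what the validation above inspects):

    import dtlpy as dl

    project = dl.projects.get(project_name='<project-name>')
    dpk = project.dpks.get(dpk_name='<dpk-name>')     # the DPK declaring the model component
    app = project.apps.get(app_name='<app-name>')     # app installed from that same DPK

    model = project.models.create(model_name='<model-component-name>',
                                  package=dpk,
                                  dataset_id='<dataset-id>',
                                  labels=['cat', 'dog'],
                                  app=app)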
 
dtlpy/repositories/recipes.py CHANGED
@@ -1,5 +1,6 @@
1
1
  import logging
2
2
  import traceback
3
+ import urllib.parse
3
4
 
4
5
  from .. import entities, miscellaneous, repositories, exceptions, _api_reference
5
6
  from ..services.api_client import ApiClient
@@ -198,9 +199,9 @@ class Recipes:
198
199
 
199
200
  def _list(self, filters: entities.Filters):
200
201
  url = filters.generate_url_query_params('/recipes')
201
-
202
+ encoded_url = urllib.parse.quote(url, safe='/:?=&')
202
203
  # request
203
- success, response = self._client_api.gen_request(req_type='get', path=url)
204
+ success, response = self._client_api.gen_request(req_type='get', path=encoded_url)
204
205
  if not success:
205
206
  raise exceptions.PlatformException(response)
206
207
  return response.json()
dtlpy/repositories/schema.py ADDED
@@ -0,0 +1,120 @@
1
+ from typing import List
2
+ import logging
3
+
4
+ from .. import entities, exceptions
5
+ from ..services.api_client import ApiClient
6
+
7
+ logger = logging.getLogger(name='dtlpy')
8
+
9
+
10
+ class UnsearchablePaths:
11
+ """
12
+ Unsearchable Paths
13
+
14
+ """
15
+
16
+ def __init__(self, client_api: ApiClient, dataset: entities.Dataset = None):
17
+ self._client_api = client_api
18
+ self._dataset = dataset
19
+
20
+ @property
21
+ def dataset(self) -> entities.Dataset:
22
+ if self._dataset is None:
23
+ raise exceptions.PlatformException(
24
+ error='2001',
25
+ message='Cannot perform action WITHOUT Dataset entity in {} repository.'.format(
26
+ self.__class__.__name__) + ' Please use dataset.schema or set a dataset')
27
+ assert isinstance(self._dataset, entities.Dataset)
28
+ return self._dataset
29
+
30
+ def __unsearchable_paths_request(self, payload):
31
+ """
32
+ Set unsearchable paths in dataset schema
33
+ """
34
+ success, response = self._client_api.gen_request(req_type='post',
35
+ path='/datasets/{}/schema/items'.format(self.dataset.id),
36
+ json_req=
37
+ {
38
+ "unsearchablePaths": payload
39
+ })
40
+ if not success:
41
+ raise exceptions.PlatformException(response)
42
+
43
+ resp = response.json()
44
+ if isinstance(resp, dict):
45
+ command = entities.Command.from_json(_json=resp,
46
+ client_api=self._client_api)
47
+
48
+ try:
49
+ command.wait()
50
+ except Exception as e:
51
+ logger.error('Command failed: {}'.format(e))
52
+ else:
53
+ logger.warning(resp)
54
+ return success
55
+
56
+ def add(self, paths: List[str]):
57
+ """
58
+ Add metadata paths to `unsearchablePaths` to exclude keys under these paths from indexing, making them unsearchable through the Dataset Browser UI and DQL queries.
59
+
60
+ :param paths: list of paths to create
61
+ :return: true if success, else raise exception
62
+ :rtype: bool
63
+
64
+ **Example**:
65
+
66
+ .. code-block:: python
67
+
68
+ success = dataset.schema.unsearchable_paths.add(paths=['metadata.key1', 'metadata.key2'])
69
+ """
70
+ return self.__unsearchable_paths_request(payload={"add": paths})
71
+
72
+ def remove(self, paths: List[str]):
73
+ """
74
+ Remove metadata paths from `unsearchablePaths` to index keys under these paths, making them searchable through the Dataset Browser UI and DQL queries.
75
+
76
+ :param paths: list of paths to delete
77
+ :return: true if success, else raise exception
78
+ :rtype: bool
79
+
80
+ **Example**:
81
+
82
+ .. code-block:: python
83
+
84
+ success = dataset.schema.unsearchable_paths.remove(paths=['metadata.key1', 'metadata.key2'])
85
+ """
86
+ return self.__unsearchable_paths_request(payload={"remove": paths})
87
+
88
+
89
+ class Schema:
90
+ """
91
+ Schema Repository
92
+ """
93
+
94
+ def __init__(self, client_api: ApiClient, dataset: entities.Dataset):
95
+ self._client_api = client_api
96
+ self.dataset = dataset
97
+ self.unsearchable_paths = UnsearchablePaths(client_api=self._client_api, dataset=dataset)
98
+
99
+ ###########
100
+ # methods #
101
+ ###########
102
+ def get(self):
103
+ """
104
+ Get dataset schema
105
+
106
+ :return: dataset schema
107
+ :rtype: dict
108
+
109
+ **Example**:
110
+
111
+ .. code-block:: python
112
+
113
+ json = dataset.schema.get()
114
+ """
115
+ success, response = self._client_api.gen_request(req_type='get',
116
+ path='/datasets/{}/schema'.format(self.dataset.id))
117
+ if not success:
118
+ raise exceptions.PlatformException(response)
119
+
120
+ return response.json()
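Usage of the new Schema repository through the dataset entity wired earlier in this diff (the dataset id is a placeholder; the metadata paths follow the docstring examples):

    import dtlpy as dl

    dataset = dl.datasets.get(dataset_id='<dataset-id>')

    # fetch the current item-metadata schema of the dataset
    schema_json = dataset.schema.get()

    # stop indexing these metadata paths (unsearchable in the Dataset Browser and DQL),
    # then make one of them searchable again
    dataset.schema.unsearchable_paths.add(paths=['metadata.key1', 'metadata.key2'])
    dataset.schema.unsearchable_paths.remove(paths=['metadata.key2'])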
dtlpy/repositories/services.py CHANGED
@@ -450,7 +450,8 @@ class Services:
450
450
  name: str,
451
451
  action: str = 'created',
452
452
  support: str = None,
453
- docs: str = None
453
+ docs: str = None,
454
+ agent_info: dict = None
454
455
  ):
455
456
  url = "/services/{}/notify".format(service_id)
456
457
  payload = {
@@ -458,6 +459,8 @@ class Services:
458
459
  'message': message,
459
460
  'notificationName': name
460
461
  }
462
+ if agent_info is not None:
463
+ payload['agentInfo'] = agent_info
461
464
 
462
465
  if support:
463
466
  payload['support'] = support
dtlpy/services/api_client.py CHANGED
@@ -981,7 +981,7 @@ class ApiClient:
981
981
 
982
982
  # prepare request
983
983
  if is_dataloop:
984
- full_url = self.environment + path
984
+ full_url = self.__base_gate_url + path
985
985
  headers_req = self._build_request_headers(headers=headers)
986
986
  else:
987
987
  full_url = path
@@ -1018,7 +1018,7 @@ class ApiClient:
1018
1018
  timeout=timeout) as session:
1019
1019
  try:
1020
1020
  async with session._request(request=session._client.request,
1021
- url=self.environment + path,
1021
+ url=self.__base_gate_url + path,
1022
1022
  method=req_type,
1023
1023
  json=json_req,
1024
1024
  data=data,
@@ -1135,7 +1135,7 @@ class ApiClient:
1135
1135
  form.add_field('file', AsyncUploadStream(buffer=to_upload,
1136
1136
  callback=callback,
1137
1137
  name=uploaded_filename))
1138
- url = '{}?mode={}'.format(self.environment + remote_url, mode)
1138
+ url = '{}?mode={}'.format(self.__base_gate_url + remote_url, mode)
1139
1139
 
1140
1140
  # use SSL context
1141
1141
  ssl_context = None
@@ -1451,6 +1451,7 @@ class ApiClient:
1451
1451
  :param force: force login. in case login with same user but want to get a new JWT
1452
1452
  :return:
1453
1453
  """
1454
+ logger.warning('dl.login_secret is deprecated. Please use dl.login_m2m instead.')
1454
1455
  return login_secret(api_client=self,
1455
1456
  email=email,
1456
1457
  password=password,
@@ -1486,6 +1487,14 @@ class ApiClient:
1486
1487
  """
1487
1488
  self.token = token # this will also set the refresh_token to None
1488
1489
 
1490
+ def login_api_key(self, api_key):
1491
+ """
1492
+ Login using API key
1493
+ :param api_key: a valid API key
1494
+ :return:
1495
+ """
1496
+ self.token = api_key
1497
+
1489
1498
  @property
1490
1499
  def login_domain(self):
1491
1500
  if self._login_domain is None:
dtlpy/services/logins.py CHANGED
@@ -1,4 +1,5 @@
1
1
  from urllib.parse import urlsplit, urlunsplit
2
+ import base64
2
3
  import requests
3
4
  import logging
4
5
  import json
@@ -89,6 +90,51 @@ def logout(api_client):
89
90
  return True
90
91
 
91
92
 
93
+ def login_html():
94
+ try:
95
+ location = os.path.dirname(os.path.realpath(__file__))
96
+ except NameError:
97
+ location = './dtlpy/services'
98
+ filename = os.path.join(location, '..', 'assets', 'lock_open.png')
99
+
100
+ if os.path.isfile(filename):
101
+
102
+ with open(filename, 'rb') as f:
103
+ image = f.read()
104
+
105
+ html = (
106
+ " <!doctype html>\n"
107
+ " <html>\n"
108
+ " <head>\n"
109
+ " <style>\n"
110
+ " body {{\n"
111
+ " background-color: #F7F7F9 !important;\n"
112
+ " display: flex;\n"
113
+ " justify-content: center;\n"
114
+ " align-items: center;\n"
115
+ " height: 100vh;\n"
116
+ " width: 100vw;\n"
117
+ " margin: 0;\n"
118
+ " }}\n"
119
+ " img {{\n"
120
+ " display: block;\n"
121
+ " max-width: 100%;\n"
122
+ " max-height: 100%;\n"
123
+ " margin: auto;\n"
124
+ " }}\n"
125
+ " </style>\n"
126
+ " </head>\n"
127
+ " <body>\n"
128
+ " <img src='data:image/png;base64,{image}'>\n"
129
+ " </body>\n"
130
+ " </html>\n"
131
+ ).format(image=base64.b64encode(image).decode())
132
+ else:
133
+ html = "<!doctype html><html><body>Logged in successfully</body></html>"
134
+
135
+ return html.encode('utf-8')
136
+
137
+
92
138
  def login(api_client, auth0_url=None, audience=None, client_id=None, login_domain=None, callback_port=None):
93
139
  import webbrowser
94
140
  from http.server import BaseHTTPRequestHandler, HTTPServer
@@ -111,24 +157,9 @@ def login(api_client, auth0_url=None, audience=None, client_id=None, login_domai
111
157
  parsed_path = urlparse(self.path)
112
158
  query = parse_qs(parsed_path.query)
113
159
  self.send_response(200)
114
-
115
- # get display image
116
- try:
117
- # working directory when running from command line
118
- location = os.path.dirname(os.path.realpath(__file__))
119
- except NameError:
120
- # working directory when running from console
121
- location = './dtlpy/services'
122
- filename = os.path.join(location, '..', 'assets', 'lock_open.png')
123
- if os.path.isfile(filename):
124
- with open(filename, 'rb') as f:
125
- self.send_header('Content-type', 'image/jpg')
126
- self.end_headers()
127
- self.wfile.write(f.read())
128
- else:
129
- self.send_header('Content-type', 'text/html')
130
- self.end_headers()
131
- self.wfile.write(bytes("<!doctype html><html><body>Logged in successfully</body></html>", 'utf-8'))
160
+ self.send_header('Content-type', 'text/html')
161
+ self.end_headers()
162
+ self.wfile.write(login_html())
132
163
  self.__class__.id_token = query['id_token'][0]
133
164
  self.__class__.access_token = query['access_token'][0]
134
165
  self.__class__.refresh_token = query['refresh_token'][0]
dtlpy/utilities/dataset_generators/dataset_generator.py CHANGED
@@ -250,10 +250,11 @@ class DatasetGenerator:
250
250
  entities.AnnotationType.BOX,
251
251
  entities.AnnotationType.POLYGON]:
252
252
  raise ValueError('unsupported annotation type: {}'.format(annotation.type))
253
+ dtype = object if self.annotation_type == entities.AnnotationType.POLYGON else None
253
254
  # reorder for output
254
255
  item_info.update({entities.AnnotationType.BOX.value: np.asarray(box_coordinates).astype(float),
255
256
  entities.AnnotationType.CLASSIFICATION.value: np.asarray(classes_ids),
256
- entities.AnnotationType.POLYGON.value: np.asarray(polygon_coordinates, dtype=object),
257
+ entities.AnnotationType.POLYGON.value: np.asarray(polygon_coordinates, dtype=dtype),
257
258
  'labels': labels})
258
259
  if len(item_info[entities.AnnotationType.CLASSIFICATION.value]) == 0:
259
260
  logger.debug('Empty annotation (nothing matched label_to_id_map) for image filename: {}'.format(
{dtlpy-1.89.29.dist-info → dtlpy-1.90.39.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dtlpy
3
- Version: 1.89.29
3
+ Version: 1.90.39
4
4
  Summary: SDK and CLI for Dataloop platform
5
5
  Home-page: https://github.com/dataloop-ai/dtlpy
6
6
  Author: Dataloop Team
@@ -43,7 +43,7 @@ Requires-Dist: redis (>=3.5)
43
43
  Requires-Dist: inquirer
44
44
  Requires-Dist: dtlpymetrics
45
45
 
46
- ![logo](https://mk0dataloop4fni44fjg.kinstacdn.com/wp-content/uploads/2020/03/logo.svg)
46
+ ![logo.svg](docs%2F_static%2Flogo.svg)
47
47
  [![Documentation Status](https://readthedocs.org/projects/dtlpy/badge/?version=latest)](https://sdk-docs.dataloop.ai/en/latest/?badge=latest)
48
48
  [![pypi](https://img.shields.io/pypi/v/dtlpy.svg)](https://pypi.org/project/dtlpy/)
49
49
  [![versions](https://img.shields.io/pypi/pyversions/dtlpy.svg)](https://github.com/dataloop-ai/dtlpy)
{dtlpy-1.89.29.dist-info → dtlpy-1.90.39.dist-info}/RECORD CHANGED
@@ -1,9 +1,9 @@
1
- dtlpy/__init__.py,sha256=gIb75Q3QsLd3UOvRgIFiDTV4u5HJr0M6KohWZVVlQp0,20086
2
- dtlpy/__version__.py,sha256=2A95rDAaSw8h3Wn5puveqxUlRGvTANhXLJ7c1jiQhsE,20
1
+ dtlpy/__init__.py,sha256=pJiyuUv6L9bWnW1vcknHAjYP-pouMCYzzexyDKxELoI,20127
2
+ dtlpy/__version__.py,sha256=BcmejWGgARVAkaNKsamesQsOmoijL9t0WK_6O0DCjuU,20
3
3
  dtlpy/exceptions.py,sha256=EQCKs3pwhwZhgMByQN3D3LpWpdxwcKPEEt-bIaDwURM,2871
4
4
  dtlpy/new_instance.py,sha256=_-F1NTJgGCCHnW68eIexwq7leLGTCHcPxodbw9mfasI,5555
5
5
  dtlpy/assets/__init__.py,sha256=D_hAa6NM8Zoy32sF_9b7m0b7I-BQEyBFg8-9Tg2WOeo,976
6
- dtlpy/assets/lock_open.png,sha256=vXHune4YF__fINPQ2l61G2zI3BeJPX_z5gkwzUNFAxs,24081
6
+ dtlpy/assets/lock_open.png,sha256=WhGXI8pYNXF3nkbbwUV7BMY8QInXmyyZbS04JoYh3UY,18132
7
7
  dtlpy/assets/main.py,sha256=N1JUsx79qnXI7Hx22C8JOzHJdGHxvrXeTx5UZAxvJfE,1380
8
8
  dtlpy/assets/main_partial.py,sha256=d8Be4Whg9Tb2VFiT85-57_L9IvxRipQXiZ83SxFs0Ro,267
9
9
  dtlpy/assets/mock.json,sha256=aByh4XlsFQJM2pOjmd7bd9zT1LSOj5pfutZDHwt8c_8,149
@@ -42,7 +42,7 @@ dtlpy/callbacks/piper_progress_reporter.py,sha256=L9OK-n6zqBP0SFhq0lrMXuMjf4uWfy
42
42
  dtlpy/callbacks/progress_viewer.py,sha256=ZZw8ljXVP2kpndLRxOhY09dOgUN7Luop-4TUuT5nSDc,2314
43
43
  dtlpy/dlp/__init__.py,sha256=QG_BxSqeic0foFBmzIkpZEF4EvxOZamknj2f5Cb6T6Q,868
44
44
  dtlpy/dlp/cli_utilities.py,sha256=Kzr-AKbRlXLdGKY2RTUNm0U_vKHxyMOB17TQegeDMdM,16037
45
- dtlpy/dlp/command_executor.py,sha256=jC2i2Aro9i_DLm5bLoZTPrPbktgSd0h5kpGSkoJsEjY,31457
45
+ dtlpy/dlp/command_executor.py,sha256=JKtRKTwrKfkXHa1VuFhPw15FuwexBPq_9ANAu2pSyXs,32113
46
46
  dtlpy/dlp/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
47
47
  dtlpy/dlp/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
48
48
  dtlpy/dlp/dlp.py,sha256=YjNBjeCDTXJ7tj8qdiGZ8lFb8DtPZl-FvViyjxt9xF8,4278
@@ -51,35 +51,35 @@ dtlpy/entities/__init__.py,sha256=cwA3ZOsZbdiixwhH6D15sU_bwybDAB8P2mg1zThajBU,45
51
51
  dtlpy/entities/analytic.py,sha256=5eAavh_NmvSWsD7uzon2vQn08chbc-dh1nlJc0awkQI,11374
52
52
  dtlpy/entities/annotation.py,sha256=s-g39sdUFafhsbGN1Taf2DSqoKbyRtWc0TVNhPpEAwA,67515
53
53
  dtlpy/entities/annotation_collection.py,sha256=Uh7pnyhAv4epMApHRtIdWRG_ro9U6p7yNV7Lnjzdzqk,29979
54
- dtlpy/entities/app.py,sha256=_ILlIofYGUa6LBmNcEcPuYjQpDA5lnM6TIYL18Qzeyk,5579
54
+ dtlpy/entities/app.py,sha256=7i7xDRnyN2PgJ_oUNfhkTh7CxRHaXf-uGS2eX_DjAAg,6295
55
55
  dtlpy/entities/app_module.py,sha256=0UiAbBX1q8iEImi3nY7ySWZZHoRRwu0qUXmyXmgVAc4,3645
56
56
  dtlpy/entities/artifact.py,sha256=wtLtBuidOPbnba0ok40JyunCCIBGbAl4bP_ebK39Kk4,5711
57
57
  dtlpy/entities/assignment.py,sha256=LTUxE2ZB5iZSbSeoroHe5P7Kv6E_K2b5cdY-7TZ4HFE,14346
58
- dtlpy/entities/base_entity.py,sha256=i8VzoXdkkJbUJD7U7U-bkbQKiUWkRwZZhl-mQT5WUsc,7578
58
+ dtlpy/entities/base_entity.py,sha256=II40doslcKJ97wtTIwhSYCNA9Lqvb5Y9LkiYNIdqDpM,7553
59
59
  dtlpy/entities/bot.py,sha256=is3NUCnPg56HSjsHIvFcVkymValMqDV0uHRDC1Ib-ds,3819
60
60
  dtlpy/entities/codebase.py,sha256=pwRkAq2GV0wvmzshg89IAmE-0I2Wsy_-QNOu8OV8uqc,8999
61
61
  dtlpy/entities/command.py,sha256=dv7w0GfTqq4OeLUoAUElYCdOrvE2Lp9Cx-XdA9N4314,4976
62
- dtlpy/entities/dataset.py,sha256=6n_0TeH4A8Je-ZUwX1Vc2GFJO9k8HU67n_j2hHWcFi4,44831
62
+ dtlpy/entities/dataset.py,sha256=6rqfC4YA6q75-q3Hbfv2XR4tGkajMwXEgzP-b0OQw8U,45075
63
63
  dtlpy/entities/directory_tree.py,sha256=Rni6pLSWytR6yeUPgEdCCRfTg_cqLOdUc9uCqz9KT-Q,1186
64
- dtlpy/entities/dpk.py,sha256=PmaZiveM7v0FynV5P7juYKy4yDdrTwBhTroB9uFpMbc,16569
64
+ dtlpy/entities/dpk.py,sha256=zTqzFIIagaJ3jUcFFI3wAmmNzVD4q0fBt41Ts0TLxeg,16788
65
65
  dtlpy/entities/driver.py,sha256=O_QdK1EaLjQyQkmvKsmkNgmvmMb1mPjKnJGxK43KrOA,7197
66
66
  dtlpy/entities/execution.py,sha256=xZxfF3tYtf7tJ_GKzwSoEkF1zhlnWmbBzLoVXrNl8DE,12800
67
67
  dtlpy/entities/feature.py,sha256=9fFjD0W57anOVSAVU55ypxN_WTCsWTG03Wkc3cAAj78,3732
68
68
  dtlpy/entities/feature_set.py,sha256=lKbaLLklYC8dZGCDzQa8T-LSYpUygqLwlC_jhHtOfLw,4326
69
- dtlpy/entities/filters.py,sha256=KazfahD_Oo5Vd646RS_k9ynmmiKFhCk5rNs7tsh3E_c,18847
69
+ dtlpy/entities/filters.py,sha256=H3_EYUNp0K8HHcddma1s308EIM0ALmXCJxmPyXA7GzU,19029
70
70
  dtlpy/entities/integration.py,sha256=Hi4PsIbrVx0LnytxI9GtPcrqRgm8Mq_BZUln1KUtxo8,5733
71
71
  dtlpy/entities/item.py,sha256=JGdBa8RDPWnVOdqRdKoz-T3o8rCOsLbKFdhs0KQDa6o,28369
72
72
  dtlpy/entities/label.py,sha256=ycDYavIgKhz806plIX-64c07_TeHpDa-V7LnfFVe4Rg,3869
73
73
  dtlpy/entities/links.py,sha256=FAmEwHtsrqKet3c0UHH9u_gHgG6_OwF1-rl4xK7guME,2516
74
74
  dtlpy/entities/message.py,sha256=ApJuaKEqxATpXjNYUjGdYPu3ibQzEMo8-LtJ_4xAcPI,5865
75
- dtlpy/entities/model.py,sha256=erPZ0EW3MOL0PA8s0Eo2YHtJdwJWxOn7ha1wNQxkBV4,24750
75
+ dtlpy/entities/model.py,sha256=GXfQoLa06GSsymBiRTqI2FYegLwyrc5S8lWMwGWxhjw,24959
76
76
  dtlpy/entities/node.py,sha256=jpf_aQRoBxD6XEZ3xjpgTPp8Iuj6Z4mAQbK7RC0Larw,37463
77
77
  dtlpy/entities/ontology.py,sha256=hpex7wLJ2JXMFq3lxLfN49LPz4M3GJIjqD-7HL5hOEw,29265
78
78
  dtlpy/entities/organization.py,sha256=AMkx8hNIIIjnu5pYlNjckMRuKt6H3lnOAqtEynkr7wg,9893
79
79
  dtlpy/entities/package.py,sha256=EA5cB3nFBlsbxVK-QroZILjol2bYSVGqCby-mOyJJjQ,26353
80
80
  dtlpy/entities/package_defaults.py,sha256=wTD7Z7rGYjVy8AcUxTFEnkOkviiJaLVZYvduiUBKNZo,211
81
- dtlpy/entities/package_function.py,sha256=kQsfW-LOTAVd8oDm-F0iWLvKsFmAXSp5ZoBrXPvISRE,6067
82
- dtlpy/entities/package_module.py,sha256=MBaJ5j8eCERsP-s1SIO8_daTU1gEqcaDSpUBu_gUTAk,4035
81
+ dtlpy/entities/package_function.py,sha256=Y24zVxTxN602cv63I8gIZ9J7wu9lThj8XF0xW4cHutk,6166
82
+ dtlpy/entities/package_module.py,sha256=PpEe635I8jnPbA1YDr_5jX3a_mEk-z-mBeX8Lw3HIgY,4134
83
83
  dtlpy/entities/package_slot.py,sha256=XBwCodQe618sQm0bmx46Npo94mEk-zUV7ZX0mDRcsD8,3946
84
84
  dtlpy/entities/paged_entities.py,sha256=A6_D0CUJsN52dBG6yn-oHHzjuVDkBNejTG5r-KxWOxI,5848
85
85
  dtlpy/entities/pipeline.py,sha256=Fh3vB2SvM-yhA8wWtIv9zojaqnE2c1cjJQr5CmveBS0,20119
@@ -147,15 +147,16 @@ dtlpy/miscellaneous/json_utils.py,sha256=0P4YTlL6o_L7AUrvAeqkqA46MZZK_hDdTrdnmI5
147
147
  dtlpy/miscellaneous/list_print.py,sha256=leEg3RodgYfH5t_0JG8VuM8NiesR8sJLK_mRSttL5pY,4808
148
148
  dtlpy/miscellaneous/zipping.py,sha256=GMdPhAeHQXeMS5ClaiKWMJWVYQLBLAaJUWxvdYrL4Ro,5337
149
149
  dtlpy/ml/__init__.py,sha256=vPkyXpc9kcWWZ_PxyPEOsjKBJdEbowLkZr8FZIb_OBM,799
150
- dtlpy/ml/base_model_adapter.py,sha256=wlq5dBrn4EF-W2Nw3r71ayj2y926x5HZJ1ci_agdStc,39640
150
+ dtlpy/ml/base_feature_extractor_adapter.py,sha256=iiEGYAx0Rdn4K46H_FlKrAv3ebTXHSxNVAmio0BxhaI,1178
151
+ dtlpy/ml/base_model_adapter.py,sha256=QTcYhOZ1wiiyVAjFj5tzTl396xpAVw6hSu8Qf3SD3-8,39941
151
152
  dtlpy/ml/metrics.py,sha256=BG2E-1Mvjv2e2No9mIJKVmvzqBvLqytKcw3hA7wVUNc,20037
152
153
  dtlpy/ml/predictions_utils.py,sha256=He_84U14oS2Ss7T_-Zj5GDiBZwS-GjMPURUh7u7DjF8,12484
153
154
  dtlpy/ml/summary_writer.py,sha256=dehDi8zmGC1sAGyy_3cpSWGXoGQSiQd7bL_Thoo8yIs,2784
154
155
  dtlpy/ml/train_utils.py,sha256=R-BHKRfqDoLLhFyLzsRFyJ4E-8iedj9s9oZqy3IO2rg,2404
155
- dtlpy/repositories/__init__.py,sha256=RskGSdPzJwsukYEIgs_7MW_1tqVx6hTPQ2WUhQ5PA5w,1922
156
+ dtlpy/repositories/__init__.py,sha256=_p6RafEniBXbeTAbz8efnIr4G4mCwV9x6LEkXhTRWUE,1949
156
157
  dtlpy/repositories/analytics.py,sha256=dQPCYTPAIuyfVI_ppR49W7_GBj0033feIm9Gd7LW1V0,2966
157
158
  dtlpy/repositories/annotations.py,sha256=E7iHo8UwDAhdulqh0lGr3fGQ-TSwZXXGsEXZA-WJ_NA,35780
158
- dtlpy/repositories/apps.py,sha256=CgWj392YLNkIiv2TPIub2fSMDqzsQ_yqK282t7fC3EY,10942
159
+ dtlpy/repositories/apps.py,sha256=6LHvjp8LlHmta4kJo3D4a0potSodfQ59yFkNQ08t4EU,13402
159
160
  dtlpy/repositories/artifacts.py,sha256=Ke2ustTNw-1eQ0onLsWY7gL2aChjXPAX5p1uQ_EzMbo,19081
160
161
  dtlpy/repositories/assignments.py,sha256=M1vlixBdAjwStqCG1MQjHsj3dH15KT0Rb5UTDtyDpEQ,25464
161
162
  dtlpy/repositories/bots.py,sha256=q1SqH01JHloljKxknhHU09psV1vQx9lPhu3g8mBBeRg,8104
@@ -168,11 +169,11 @@ dtlpy/repositories/dpks.py,sha256=lVaVsDElhO6vfvGbrsKvzAXdY3L4qjQMRCZPWIrDq6A,15
168
169
  dtlpy/repositories/drivers.py,sha256=fF0UuHCyBzop8pHfryex23mf0kVFAkqzNdOmwBbaWxY,10204
169
170
  dtlpy/repositories/executions.py,sha256=ixfekN5quQ_tQL7kdfPOb4r3qCvNTlWIBV8hPk2ElbE,30480
170
171
  dtlpy/repositories/feature_sets.py,sha256=xSceAIwK608JSU42IPT1NrvCtnaDJ582JfLd05F3Cck,8811
171
- dtlpy/repositories/features.py,sha256=CiZTnHatek6pqJup1hcHryI-0waagN4273eyhirCxgM,9301
172
+ dtlpy/repositories/features.py,sha256=X9luMRoTMbwhIEh3UTVQtd3jl6ToFUmv9s39EHuLKIc,9616
172
173
  dtlpy/repositories/integrations.py,sha256=gNQmw5ykFtBaimdxUkzCXQqefZaM8yQPnxWZkIJK7ww,11666
173
174
  dtlpy/repositories/items.py,sha256=DqJ3g9bc4OLMm9KqI-OebXbr-zcEiohO1wGZJ1uE2Lg,37874
174
175
  dtlpy/repositories/messages.py,sha256=zYcoz8Us6j8Tb5Z7luJuvtO9xSRTuOCS7pl-ztt97Ac,3082
175
- dtlpy/repositories/models.py,sha256=lQ5PQI0cRhHBaHb6h-B0ays3YqnJt-ng5q19fC875N4,33445
176
+ dtlpy/repositories/models.py,sha256=7kYYE0cfw57bsv_T18QqjCBZsd6fSp99y_Vm94Ya44o,34813
176
177
  dtlpy/repositories/nodes.py,sha256=xXJm_YA0vDUn0dVvaGeq6ORM0vI3YXvfjuylvGRtkxo,3061
177
178
  dtlpy/repositories/ontologies.py,sha256=unnMhD2isR9DVE5S8Fg6fSDf1ZZ5Xemxxufx4LEUT3w,19577
178
179
  dtlpy/repositories/organizations.py,sha256=6ijUDFbsogfRul1g_vUB5AZOb41MRmV5NhNU7WLHt3A,22825
@@ -180,9 +181,10 @@ dtlpy/repositories/packages.py,sha256=FCshPoEDuKNfA3WIPqzqr_w_fJV2dFw2IvLdSuVVxv
180
181
  dtlpy/repositories/pipeline_executions.py,sha256=CYxJ_Lt5yeZI0Y0uhaolmgshu6_96QmPQUtyWrNGYFE,14421
181
182
  dtlpy/repositories/pipelines.py,sha256=VDAOsGbgD1_AKdMrJl_qB3gxPs7f3pwUnPx0pT1iAWk,23977
182
183
  dtlpy/repositories/projects.py,sha256=tZyFLqVs-8ggTIi5echlX7XdGOJGW4LzKuXke7jkRnw,22140
183
- dtlpy/repositories/recipes.py,sha256=kPsN6htcEzdO4JLq3cidzSnZUdx8RDVW2fvNFXdPKWk,15756
184
+ dtlpy/repositories/recipes.py,sha256=ZZDhHn9g28C99bsf0nFaIpVYn6f6Jisz9upkHEkeaYY,15843
184
185
  dtlpy/repositories/resource_executions.py,sha256=PyzsbdJxz6jf17Gx13GZmqdu6tZo3TTVv-DypnJ_sY0,5374
185
- dtlpy/repositories/services.py,sha256=x89_1ClnOahMvA0siOkh3A2WQXeFtAu_rYe7xTfAnwI,66237
186
+ dtlpy/repositories/schema.py,sha256=kTKDrbwm7BfQnBAK81LpAl9ChNFdyUweSLNazlJJhjk,3953
187
+ dtlpy/repositories/services.py,sha256=MDXQ6vAwZNXtBfhjekFHl2vzjGSZdbk6EJ1cAOkkKHs,66355
186
188
  dtlpy/repositories/settings.py,sha256=pvqNse0ANCdU3NSLJEzHco-PZq__OIsPSPVJveB9E4I,12296
187
189
  dtlpy/repositories/tasks.py,sha256=bbk0l0EZzFFEN_TjHBqDh6GNVum3yF902gdjO0iEFhk,48076
188
190
  dtlpy/repositories/times_series.py,sha256=m-bKFEgiZ13yQNelDjBfeXMUy_HgsPD_JAHj1GVx9fU,11420
@@ -192,7 +194,7 @@ dtlpy/repositories/uploader.py,sha256=PzEq4yHsxJT1lv_wYGS-gh7Wmu37TA-14ww_r-6HBr
192
194
  dtlpy/repositories/webhooks.py,sha256=IIpxOJ-7KeQp1TY9aJZz-FuycSjAoYx0TDk8z86KAK8,9033
193
195
  dtlpy/services/__init__.py,sha256=VfVJy2otIrDra6i7Sepjyez2ujiE6171ChQZp-YgxsM,904
194
196
  dtlpy/services/aihttp_retry.py,sha256=tgntZsAY0dW9v08rkjX1T5BLNDdDd8svtgn7nH8DSGU,5022
195
- dtlpy/services/api_client.py,sha256=Ew3neG7P8giK3CwFXYWMJnbRAYbGFA2K_jIVavUqnB8,65848
197
+ dtlpy/services/api_client.py,sha256=bMmLKQ0BbzLzEsEcZe-RSADjkRjTEoWJiQIQvo8h2L4,66127
196
198
  dtlpy/services/api_reference.py,sha256=cW-B3eoi9Xs3AwI87_Kr6GV_E6HPoC73aETFaGz3A-0,1515
197
199
  dtlpy/services/async_utils.py,sha256=lfpkTkRUvQoMTxaRZBHbPt5e43qdvpCGDe_-KcY2Jps,2810
198
200
  dtlpy/services/calls_counter.py,sha256=gr0io5rIsO5-7Cgc8neA1vK8kUtYhgFPmDQ2jXtiZZs,1036
@@ -200,7 +202,7 @@ dtlpy/services/check_sdk.py,sha256=tnFWCzkJa8w2jLtw-guwuqpOtXGyiVU7ZCDFiUZUqzY,3
200
202
  dtlpy/services/cookie.py,sha256=sSZR1QV4ienCcZ8lEK_Y4nZYBgAxO3kHrcBXFKGcmwQ,3694
201
203
  dtlpy/services/create_logger.py,sha256=WFQjuvCuwrZoftFaU9jQkmEcOrL1XD-NqsuBqb5_SN4,6332
202
204
  dtlpy/services/events.py,sha256=mpcu8RusLPrBcJEbWR61uFb4FiU_dQv3xoa7uM-rTcY,3686
203
- dtlpy/services/logins.py,sha256=sSE4kmXw9EZylgIQaUYgHH-MgtpEcjUMJpFn5X5C-lA,7906
205
+ dtlpy/services/logins.py,sha256=YMMi_C_A97ZNtIlREE30hpBRhULAZJtORiVL6OL0oPQ,8766
204
206
  dtlpy/services/reporter.py,sha256=4zi9-bshKAPHG2XMOXS39cFZ0mhqNc3Qa9uaMN7CSZ8,9122
205
207
  dtlpy/services/service_defaults.py,sha256=a7KoqkVmn2TXmM9gN9JRaVVtcG2b8JGIieVnaZeEaao,3860
206
208
  dtlpy/utilities/__init__.py,sha256=ncQD1O5lZ7L9n9rNRBivyqNVFDZyQcmqn-X-wyQhhIs,898
@@ -209,7 +211,7 @@ dtlpy/utilities/converter.py,sha256=8mOdKiLe1ATgR1Q56-c6d716aKGRt0A3M9AelijaYN8,
209
211
  dtlpy/utilities/annotations/__init__.py,sha256=Eb72MloiwDQWe8H4NptFP1RZEEhcY2Fz_w_e34tdCiE,728
210
212
  dtlpy/utilities/annotations/annotation_converters.py,sha256=KOqLVtb88GnrvuVi5x-t5vtzVN9Am98RersBl_D44SU,10796
211
213
  dtlpy/utilities/dataset_generators/__init__.py,sha256=pA7UqhTh51gC407FyNa_WG8fUFnd__4tmEUTkNBlcLs,65
212
- dtlpy/utilities/dataset_generators/dataset_generator.py,sha256=-zVhDESAfy3rMTBE79JuU9S8ENYee_MVoCyWl7mouQk,31380
214
+ dtlpy/utilities/dataset_generators/dataset_generator.py,sha256=QOuydoXL9NlYYMw2jY8Dl6y4dNcldT3kkeu0V95R0MQ,31479
213
215
  dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py,sha256=ZY97o4UAR5tM_req9O1Wh_N-U3cebSypub6kibykYP8,716
214
216
  dtlpy/utilities/dataset_generators/dataset_generator_torch.py,sha256=qPm03zVZmga_BQSyWgcodYQL25WYiiBtz8QpCsU4oYc,536
215
217
  dtlpy/utilities/local_development/__init__.py,sha256=6s1Ns7mN20J3yyIlgQbKAhPRqy7zQo-hIafLDrIj5cg,70
@@ -220,14 +222,19 @@ dtlpy/utilities/reports/report.py,sha256=3nEsNnIWmdPEsd21nN8vMMgaZVcPKn9iawKTTeO
220
222
  dtlpy/utilities/videos/__init__.py,sha256=SV3w51vfPuGBxaMeNemx6qEMHw_C4lLpWNGXMvdsKSY,734
221
223
  dtlpy/utilities/videos/video_player.py,sha256=LCxg0EZ_DeuwcT7U_r7MRC6Q19s0xdFb7x5Gk39PRms,24072
222
224
  dtlpy/utilities/videos/videos.py,sha256=Dj916B4TQRIhI7HZVevl3foFrCsPp0eeWwvGbgX3-_A,21875
223
- dtlpy-1.89.29.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
224
- dtlpy-1.89.29.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
225
- dtlpy-1.89.29.data/scripts/dlp.py,sha256=tEokRaDINISXnq8yNx_CBw1qM5uwjYiZoJOYGqWB3RU,4267
225
+ dtlpy-1.90.39.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
226
+ dtlpy-1.90.39.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
227
+ dtlpy-1.90.39.data/scripts/dlp.py,sha256=tEokRaDINISXnq8yNx_CBw1qM5uwjYiZoJOYGqWB3RU,4267
228
+ tests/assets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
229
+ tests/assets/models_flow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
230
+ tests/assets/models_flow/failedmain.py,sha256=n8F4eu_u7JPrJ1zedbJPvv9e3lHb3ihoErqrBIcseEc,1847
231
+ tests/assets/models_flow/main.py,sha256=87O3-JaWcC6m_kA39sqPhX70_VCBzzbLWmX2YQFilJw,1873
232
+ tests/assets/models_flow/main_model.py,sha256=Hl_tv7Q6KaRL3yLkpUoLMRqu5-ab1QsUYPL6RPEoamw,2042
226
233
  tests/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
227
234
  tests/features/environment.py,sha256=V0FuAjbwiN1ddlJrCjjGSgPrU9TEQWraZkp1E7QDzdQ,13849
228
- dtlpy-1.89.29.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
229
- dtlpy-1.89.29.dist-info/METADATA,sha256=NuIo2ZEbK5luVxvtbMaZRZ39EJIiMAvs4fLWDK59y0w,3026
230
- dtlpy-1.89.29.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
231
- dtlpy-1.89.29.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
232
- dtlpy-1.89.29.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
233
- dtlpy-1.89.29.dist-info/RECORD,,
235
+ dtlpy-1.90.39.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
236
+ dtlpy-1.90.39.dist-info/METADATA,sha256=Lbu5Y6VhyT-lUBvB0W3mhwE9MVt1DWrRu-sBbZMY2tI,2976
237
+ dtlpy-1.90.39.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
238
+ dtlpy-1.90.39.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
239
+ dtlpy-1.90.39.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
240
+ dtlpy-1.90.39.dist-info/RECORD,,
File without changes
File without changes
tests/assets/models_flow/failedmain.py ADDED
@@ -0,0 +1,52 @@
1
+ import time
2
+
3
+ import dtlpy as dl
4
+ import logging
5
+ import os
6
+ import random
7
+
8
+ logger = logging.getLogger('dummy-adapter')
9
+
10
+
11
+ @dl.Package.decorators.module(name='model-adapter',
12
+ description='Model Adapter for Dummy Model',
13
+ init_inputs={'model_entity': dl.Model})
14
+ class ModelAdapter(dl.BaseModelAdapter):
15
+ """
16
+ Dummy Model adapter using pytorch.
17
+ The class bind Dataloop model and model entities with model code implementation
18
+ """
19
+
20
+ def __init__(self, model_entity=None):
21
+ super(ModelAdapter, self).__init__(model_entity=model_entity)
22
+
23
+ def load(self, local_path, **kwargs):
24
+ logger.info("Loaded model")
25
+
26
+ def save(self, local_path, **kwargs):
27
+ logger.info("Saved model")
28
+
29
+ def train(self, data_path, output_path, **kwargs):
30
+ logger.info("training")
31
+ raise Exception("Failed to train")
32
+
33
+ def predict(self, batch, **kwargs):
34
+ batch_annotations = list()
35
+
36
+ for img in batch:
37
+ collection = dl.AnnotationCollection()
38
+ for index in range(5):
39
+ collection.add(
40
+ annotation_definition=dl.Box(label=self.model_entity.labels[index], top=index * 10, left=index * 10,
41
+ bottom=index * 10 + 10, right=index * 10 + 10),
42
+ model_info={'name': "test-model",
43
+ 'confidence': 0.5,
44
+ 'model_id': self.model_entity.id,
45
+ 'dataset_id': self.model_entity.dataset_id})
46
+ logger.debug("Predicted {} ({})".format(str(index), index * 0.1))
47
+ batch_annotations.append(collection)
48
+
49
+ return batch_annotations
50
+
51
+ def convert_from_dtlpy(self, data_path, **kwargs):
52
+ logger.info("convert_from_dtlpy")
tests/assets/models_flow/main.py ADDED
@@ -0,0 +1,51 @@
1
+ import dtlpy as dl
2
+ import logging
3
+ import os
4
+ import random
5
+
6
+ logger = logging.getLogger('dummy-adapter')
7
+
8
+
9
+ @dl.Package.decorators.module(name='model-adapter',
10
+ description='Model Adapter for Dummy Model',
11
+ init_inputs={'model_entity': dl.Model})
12
+ class ModelAdapter(dl.BaseModelAdapter):
13
+ """
14
+ Dummy Model adapter using pytorch.
15
+ The class bind Dataloop model and model entities with model code implementation
16
+ """
17
+
18
+ def __init__(self, model_entity=None):
19
+ super(ModelAdapter, self).__init__(model_entity=model_entity)
20
+
21
+ def load(self, local_path, **kwargs):
22
+ logger.info("Loaded model")
23
+
24
+ def save(self, local_path, **kwargs):
25
+ logger.info("Saved model")
26
+
27
+ def train(self, data_path, output_path, **kwargs):
28
+ logger.info("model training")
29
+ print(self.model_entity.id)
30
+
31
+ def predict(self, batch, **kwargs):
32
+ logger.info("model prediction")
33
+ batch_annotations = list()
34
+
35
+ for img in batch:
36
+ collection = dl.AnnotationCollection()
37
+ for index in range(5):
38
+ collection.add(
39
+ annotation_definition=dl.Box(label=self.model_entity.labels[index], top=index * 10, left=index * 10,
40
+ bottom=index * 10 + 10, right=index * 10 + 10),
41
+ model_info={'name': "test-model",
42
+ 'confidence': 0.5,
43
+ 'model_id': self.model_entity.id,
44
+ 'dataset_id': self.model_entity.dataset_id})
45
+ logger.debug("Predicted {} ({})".format(str(index), index * 0.1))
46
+ batch_annotations.append(collection)
47
+
48
+ return batch_annotations
49
+
50
+ def convert_from_dtlpy(self, data_path, **kwargs):
51
+ logger.info("convert_from_dtlpy")
tests/assets/models_flow/main_model.py ADDED
@@ -0,0 +1,54 @@
1
+ import dtlpy as dl
2
+ import logging
3
+ import os
4
+ import random
5
+
6
+ logger = logging.getLogger('dummy-adapter')
7
+
8
+
9
+ @dl.Package.decorators.module(name='model-adapter',
10
+ description='Model Adapter for Dummy Model',
11
+ init_inputs={'model_entity': dl.Model, "test": "String"})
12
+ class ModelAdapter(dl.BaseModelAdapter):
13
+ """
14
+ Dummy Model adapter using pytorch.
15
+ The class bind Dataloop model and model entities with model code implementation
16
+ """
17
+
18
+ def __init__(self, test, model_entity=None):
19
+ super(ModelAdapter, self).__init__(model_entity=model_entity)
20
+ self.test = test
21
+
22
+ def load(self, local_path, **kwargs):
23
+ logger.info("Loaded model")
24
+
25
+ def save(self, local_path, **kwargs):
26
+ logger.info("Saved model")
27
+
28
+ def train(self, data_path, output_path, **kwargs):
29
+ if self.test != 'default_value':
30
+ raise ValueError(f"test is not set to default_value: {self.test}")
31
+ logger.info("model training")
32
+ print(self.model_entity.id)
33
+
34
+ def predict(self, batch, **kwargs):
35
+ logger.info("model prediction")
36
+ batch_annotations = list()
37
+
38
+ for img in batch:
39
+ collection = dl.AnnotationCollection()
40
+ for index in range(5):
41
+ collection.add(
42
+ annotation_definition=dl.Box(label=self.model_entity.labels[index], top=index * 10, left=index * 10,
43
+ bottom=index * 10 + 10, right=index * 10 + 10),
44
+ model_info={'name': "test-model",
45
+ 'confidence': 0.5,
46
+ 'model_id': self.model_entity.id,
47
+ 'dataset_id': self.model_entity.dataset_id})
48
+ logger.debug("Predicted {} ({})".format(str(index), index * 0.1))
49
+ batch_annotations.append(collection)
50
+
51
+ return batch_annotations
52
+
53
+ def convert_from_dtlpy(self, data_path, **kwargs):
54
+ logger.info("convert_from_dtlpy")
File without changes