dtlpy 1.95.6__py3-none-any.whl → 1.98.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. dtlpy/__init__.py +2 -2
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/entities/__init__.py +2 -1
  4. dtlpy/entities/annotation.py +3 -0
  5. dtlpy/entities/annotation_definitions/__init__.py +1 -0
  6. dtlpy/entities/annotation_definitions/gis.py +69 -0
  7. dtlpy/entities/app.py +1 -1
  8. dtlpy/entities/filters.py +3 -1
  9. dtlpy/entities/gis_item.py +108 -0
  10. dtlpy/entities/model.py +5 -6
  11. dtlpy/entities/pipeline.py +22 -11
  12. dtlpy/entities/service.py +28 -2
  13. dtlpy/ml/base_model_adapter.py +1 -1
  14. dtlpy/new_instance.py +2 -2
  15. dtlpy/repositories/annotations.py +124 -13
  16. dtlpy/repositories/executions.py +54 -0
  17. dtlpy/repositories/items.py +5 -5
  18. dtlpy/repositories/messages.py +2 -2
  19. dtlpy/repositories/pipeline_executions.py +24 -10
  20. dtlpy/repositories/services.py +2 -0
  21. dtlpy/repositories/tasks.py +12 -3
  22. dtlpy/repositories/uploader.py +12 -4
  23. dtlpy/services/api_client.py +37 -7
  24. dtlpy/services/async_utils.py +4 -2
  25. {dtlpy-1.95.6.dist-info → dtlpy-1.98.8.dist-info}/METADATA +1 -1
  26. {dtlpy-1.95.6.dist-info → dtlpy-1.98.8.dist-info}/RECORD +34 -32
  27. tests/assets/models_flow/main.py +7 -0
  28. {dtlpy-1.95.6.data → dtlpy-1.98.8.data}/scripts/dlp +0 -0
  29. {dtlpy-1.95.6.data → dtlpy-1.98.8.data}/scripts/dlp.bat +0 -0
  30. {dtlpy-1.95.6.data → dtlpy-1.98.8.data}/scripts/dlp.py +0 -0
  31. {dtlpy-1.95.6.dist-info → dtlpy-1.98.8.dist-info}/LICENSE +0 -0
  32. {dtlpy-1.95.6.dist-info → dtlpy-1.98.8.dist-info}/WHEEL +0 -0
  33. {dtlpy-1.95.6.dist-info → dtlpy-1.98.8.dist-info}/entry_points.txt +0 -0
  34. {dtlpy-1.95.6.dist-info → dtlpy-1.98.8.dist-info}/top_level.txt +0 -0
dtlpy/__init__.py CHANGED
@@ -68,7 +68,7 @@ from .entities import (
68
68
  Ontology, Label, Task, TaskPriority, ConsensusTaskType, Assignment, Service, Package, Codebase, Model,
69
69
  PackageModule, PackageFunction,
70
70
  # annotations
71
- Box, Cube, Cube3d, Point, Note, Message, Segmentation, Ellipse, Classification, Subtitle, Polyline, Pose,
71
+ Box, Cube, Cube3d, Point, Note, Message, Segmentation, Ellipse, Classification, Subtitle, Polyline, Pose, Gis, GisType,
72
72
  Description,
73
73
  Polygon, Text, FreeText, RefImage,
74
74
  # filters
@@ -93,7 +93,7 @@ from .entities import (
93
93
  Webhook, HttpMethod,
94
94
  ViewAnnotationOptions, AnnotationStatus, AnnotationType,
95
95
  ItemStatus, ExecutionStatus, ExportMetadata,
96
- PromptItem, Prompt, PromptType,
96
+ PromptItem, Prompt, PromptType, ItemGis, Layer,
97
97
  ItemLink, UrlLink, LinkTypeEnum,
98
98
  Modality, ModalityTypeEnum, ModalityRefTypeEnum,
99
99
  Workload, WorkloadUnit, ItemAction,
dtlpy/__version__.py CHANGED
@@ -1 +1 @@
1
- version = '1.95.6'
1
+ version = '1.98.8'
@@ -34,7 +34,7 @@ from .recipe import Recipe
34
34
  from .ontology import Ontology, AttributesTypes, AttributesRange
35
35
  from .annotation_definitions import Box, Cube, Cube3d, Point, Segmentation, Polygon, Ellipse, Classification, \
36
36
  Subtitle, Text, FreeText, RefImage, \
37
- Polyline, Comparison, UndefinedAnnotationType, Note, Message, Description, Pose
37
+ Polyline, Comparison, UndefinedAnnotationType, Note, Message, Description, Pose, Gis, GisType
38
38
  from .label import Label
39
39
  from .codebase import Codebase, PackageCodebaseType, ItemCodebase, GitCodebase, FilesystemCodebase, LocalCodebase
40
40
  from .package import Package, RequirementOperator, PackageRequirement
@@ -78,3 +78,4 @@ from .prompt_item import Prompt, PromptItem, PromptType
78
78
  from .compute import ClusterProvider, ComputeType, ComputeStatus, Toleration, DeploymentResource, DeploymentResources, \
79
79
  NodePool, AuthenticationIntegration, Authentication, ComputeCluster, ComputeContext, Compute, KubernetesCompute, \
80
80
  ServiceDriver
81
+ from .gis_item import ItemGis, Layer
@@ -43,6 +43,7 @@ class AnnotationType(str, Enum):
43
43
  SEGMENTATION = "binary"
44
44
  SUBTITLE = "subtitle"
45
45
  TEXT = "text_mark"
46
+ GIS = "gis"
46
47
 
47
48
 
48
49
  class ViewAnnotationOptions(str, Enum):
@@ -1796,6 +1797,8 @@ class FrameAnnotation(entities.BaseEntity):
1796
1797
  annotation = entities.Note.from_json(_json)
1797
1798
  elif _json['type'] == 'pose':
1798
1799
  annotation = entities.Pose.from_json(_json)
1800
+ elif _json['type'] == 'gis':
1801
+ annotation = entities.Gis.from_json(_json)
1799
1802
  else:
1800
1803
  annotation = entities.UndefinedAnnotationType.from_json(_json)
1801
1804
  return annotation
@@ -17,3 +17,4 @@ from .pose import Pose
17
17
  from .text import Text
18
18
  from .free_text import FreeText
19
19
  from .ref_image import RefImage
20
+ from .gis import Gis, GisType
@@ -0,0 +1,69 @@
1
+ from . import BaseAnnotationDefinition
2
+
3
+
4
+ class GisType:
5
+ """
6
+ State enum
7
+ """
8
+ BOX = 'box'
9
+ POLYGON = 'polygon'
10
+ POLYLINE = 'polyline'
11
+ POINT = 'point'
12
+
13
+
14
+ class Gis(BaseAnnotationDefinition):
15
+ """
16
+ Box annotation object
17
+ Can create a box using 2 point using: "top", "left", "bottom", "right" (to form a box [(left, top), (right, bottom)])
18
+ For rotated box add the "angel"
19
+ """
20
+ type = "gis"
21
+
22
+ def __init__(self,
23
+ annotation_type: GisType,
24
+ geo,
25
+ label=None,
26
+ attributes=None,
27
+ description=None,
28
+ ):
29
+ """
30
+ Can create gis annotation using points:
31
+
32
+ :param geo: list of points
33
+ :param label: annotation label
34
+ :param attributes: a list of attributes for the annotation
35
+ :param description:
36
+
37
+ :return:
38
+ """
39
+ super().__init__(description=description, attributes=attributes)
40
+
41
+ if geo is None:
42
+ raise ValueError('geo must be provided')
43
+ if annotation_type is None:
44
+ raise ValueError('annotation_type must be provided')
45
+ self.label = label
46
+ self.annotation = None
47
+ self.geo = geo
48
+ self.annotation_type = annotation_type
49
+
50
+ def to_coordinates(self, color):
51
+ return {
52
+ "geo_type": self.annotation_type,
53
+ "wgs84_geo_coordinates": self.geo
54
+ }
55
+
56
+ @classmethod
57
+ def from_json(cls, _json):
58
+ json_coordinates = _json.get("coordinates", {}) if "coordinates" in _json else _json.get("data", {})
59
+ coordinates = json_coordinates.get("wgs84_geo_coordinates", None)
60
+ annotations_type = json_coordinates.get("geo_type", None)
61
+ if coordinates is None:
62
+ raise ValueError('can not find "coordinates" or "data" in annotation. id: {}'.format(_json["id"]))
63
+
64
+ return cls(
65
+ annotation_type=annotations_type,
66
+ geo=coordinates,
67
+ label=_json["label"],
68
+ attributes=_json.get("attributes", None)
69
+ )
dtlpy/entities/app.py CHANGED
@@ -199,7 +199,7 @@ class App(entities.BaseEntity):
199
199
  name=_json.get('name', None),
200
200
  url=_json.get('url', None),
201
201
  created_at=_json.get('createdAt', None),
202
- updated_at=_json.get('updateAt', None),
202
+ updated_at=_json.get('updatedAt', None),
203
203
  creator=_json.get('creator', None),
204
204
  project_id=_json.get('projectId', None),
205
205
  org_id=_json.get('orgId', None),
dtlpy/entities/filters.py CHANGED
@@ -196,6 +196,8 @@ class Filters:
196
196
  """
197
197
  if method is None:
198
198
  method = self.method
199
+ if 'metadata.system.refs.metadata' in field and self.resource == FiltersResource.ITEM:
200
+ logger.warning('Filtering by metadata.system.refs.metadata may cause incorrect results. please use match operator')
199
201
 
200
202
  # create SingleFilter object and add to self.filter_list
201
203
  if method == FiltersMethod.OR:
@@ -328,7 +330,7 @@ class Filters:
328
330
  self._unique_fields = ['type']
329
331
  self.add(field='type',
330
332
  values=['box', 'class', 'comparison', 'ellipse', 'point', 'segment', 'polyline', 'binary',
331
- 'subtitle', 'cube', 'cube_3d', 'pose', 'text_mark', 'text', 'ref_image'],
333
+ 'subtitle', 'cube', 'cube_3d', 'pose', 'text_mark', 'text', 'ref_image', 'gis'],
332
334
  operator=FiltersOperations.IN,
333
335
  method=FiltersMethod.AND)
334
336
 
@@ -0,0 +1,108 @@
1
+ import json
2
+ from typing import List
3
+ import logging
4
+ import os
5
+
6
+ logger = logging.getLogger(name='dtlpy')
7
+
8
+
9
+ class Layer:
10
+ def __init__(self, name, layer_type, url):
11
+ self.name = name
12
+ self.type = layer_type
13
+ self.url = url
14
+
15
+
16
+ class ItemGis:
17
+ def __init__(self,
18
+ name: str,
19
+ data: dict = None,
20
+ layer: Layer = None,
21
+ optional_layers: List[Layer] = None,
22
+ zoom: int = None,
23
+ min_zoom: int = None,
24
+ max_zoom: int = None,
25
+ epsg: str = None,
26
+ bounds: list = None,
27
+ aoi: list = None):
28
+ self.name = name
29
+
30
+ self.layer = layer or Layer(name=data.get('name', None), layer_type=data.get('type', None),
31
+ url=data.get('url', None))
32
+ if self.layer is None:
33
+ raise ValueError('layer is required')
34
+ elif self.layer is not None and isinstance(self.layer, dict):
35
+ self.layer = Layer(name=self.layer.get('name', None), layer_type=self.layer.get('type', None), url=self.layer.get('url', None))
36
+
37
+
38
+ self.optional_layers = optional_layers or [
39
+ Layer(name=layer.get('name', None), layer_type=layer.get('type', None), url=layer.get('url', None)) for
40
+ layer in data.get('optionalLayers', [])]
41
+
42
+ if self.optional_layers is not None and isinstance(optional_layers, list):
43
+ new_optional_layers = []
44
+ for op_layer in self.optional_layers:
45
+ if isinstance(op_layer, dict):
46
+ new_optional_layers.append(Layer(name=op_layer.get('name', None), layer_type=op_layer.get('type', None), url=op_layer.get('url', None)))
47
+ else:
48
+ new_optional_layers.append(op_layer)
49
+ self.optional_layers = new_optional_layers
50
+
51
+ self.epsg = epsg or data.get('epsg', None)
52
+ if self.epsg is None:
53
+ raise ValueError('epsg is required')
54
+
55
+ self.zoom = zoom or data.get('zoom', None)
56
+ self.min_zoom = min_zoom or data.get('minZoom', None)
57
+ self.max_zoom = max_zoom or data.get('maxZoom', None)
58
+ self.bounds = bounds or data.get('bounds', None)
59
+ self.aoi = aoi or data.get('aoi', None)
60
+
61
+ def to_json(self):
62
+ _json = {
63
+ "type": "gis",
64
+ "shebang": "dataloop",
65
+ "metadata": {
66
+ "dltype": "gis"
67
+ },
68
+ 'layer': {
69
+ 'name': self.layer.name,
70
+ 'type': self.layer.type,
71
+ 'url': self.layer.url
72
+ },
73
+ "epsg": self.epsg
74
+ }
75
+ if self.optional_layers is not None:
76
+ _json['optionalLayers'] = [
77
+ {
78
+ 'name': layer.name,
79
+ 'type': layer.type,
80
+ 'url': layer.url
81
+ } for layer in self.optional_layers
82
+ ]
83
+ if self.zoom is not None:
84
+ _json['zoom'] = self.zoom
85
+ if self.min_zoom is not None:
86
+ _json['minZoom'] = self.min_zoom
87
+ if self.max_zoom is not None:
88
+ _json['maxZoom'] = self.max_zoom
89
+ if self.bounds is not None:
90
+ _json['bounds'] = self.bounds
91
+ if self.aoi is not None:
92
+ _json['aoi'] = self.aoi
93
+ return _json
94
+
95
+ @classmethod
96
+ def from_local_file(cls, filepath):
97
+ """
98
+ Create a new prompt item from a file
99
+ :param filepath: path to the file
100
+ :return: PromptItem object
101
+ """
102
+ if os.path.exists(filepath) is False:
103
+ raise FileNotFoundError(f'File does not exists: {filepath}')
104
+ if 'json' not in os.path.splitext(filepath)[-1]:
105
+ raise ValueError(f'Expected path to json item, got {os.path.splitext(filepath)[-1]}')
106
+ with open(filepath, 'r', encoding='utf-8') as f:
107
+ data = json.load(f)
108
+ return cls(name=os.path.basename(filepath), data=data)
dtlpy/entities/model.py CHANGED
@@ -388,13 +388,12 @@ class Model(entities.BaseEntity):
388
388
 
389
389
  @property
390
390
  def id_to_label_map(self):
391
+ # default
391
392
  if 'id_to_label_map' not in self.configuration:
392
- # default
393
- if self.ontology_id == 'null' or self.ontology_id is None:
394
- self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in enumerate(self.labels)}
395
- else:
396
- self.configuration['id_to_label_map'] = {int(idx): lbl.tag for idx, lbl in
397
- enumerate(self.ontology.labels)}
393
+ if not (self.dataset_id == 'null' or self.dataset_id is None):
394
+ self.labels = [label.tag for label in self.dataset.labels]
395
+ self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in enumerate(self.labels)}
396
+ # use existing
398
397
  else:
399
398
  self.configuration['id_to_label_map'] = {int(idx): lbl for idx, lbl in
400
399
  self.configuration['id_to_label_map'].items()}
@@ -488,26 +488,35 @@ class Pipeline(entities.BaseEntity):
488
488
  """
489
489
  return self.pipelines.pause(pipeline=self, keep_triggers_active=keep_triggers_active)
490
490
 
491
- def execute(self, execution_input=None):
491
+ def execute(self, execution_input=None, node_id: str = None):
492
492
  """
493
493
  execute a pipeline and return to execute
494
494
 
495
495
  :param execution_input: list of the dl.FunctionIO or dict of pipeline input - example {'item': 'item_id'}
496
+ :param str node_id: node id to execute
496
497
  :return: entities.PipelineExecution object
497
498
  """
498
- execution = self.pipeline_executions.create(pipeline_id=self.id, execution_input=execution_input)
499
+ execution = self.pipeline_executions.create(
500
+ pipeline_id=self.id,
501
+ execution_input=execution_input,
502
+ node_id=node_id
503
+ )
499
504
  return execution
500
505
 
501
- def execute_batch(self,
502
- filters,
503
- execution_inputs=None,
504
- wait=True):
506
+ def execute_batch(
507
+ self,
508
+ filters,
509
+ execution_inputs=None,
510
+ wait=True,
511
+ node_id: str = None
512
+ ):
505
513
  """
506
514
  execute a pipeline and return to execute
507
515
 
508
516
  :param execution_inputs: list of the dl.FunctionIO or dict of pipeline input - example {'item': 'item_id'}, that represent the extra inputs of the function
509
517
  :param filters: Filters entity for a filtering before execute
510
518
  :param bool wait: wait until create task finish
519
+ :param str node_id: node id to execute
511
520
  :return: entities.PipelineExecution object
512
521
 
513
522
  **Example**:
@@ -518,10 +527,13 @@ class Pipeline(entities.BaseEntity):
518
527
  execution_inputs=dl.FunctionIO(type=dl.PackageInputType.STRING, value='test', name='string'),
519
528
  filters=dl.Filters(field='dir', values='/test', context={'datasets': [dataset.id]))
520
529
  """
521
- command = self.pipeline_executions.create_batch(pipeline_id=self.id,
522
- execution_inputs=execution_inputs,
523
- filters=filters,
524
- wait=wait)
530
+ command = self.pipeline_executions.create_batch(
531
+ pipeline_id=self.id,
532
+ execution_inputs=execution_inputs,
533
+ filters=filters,
534
+ wait=wait,
535
+ node_id=node_id
536
+ )
525
537
  return command
526
538
 
527
539
  def reset(self, stop_if_running: bool = False):
@@ -578,4 +590,3 @@ class Pipeline(entities.BaseEntity):
578
590
  for variable in self.variables:
579
591
  if variable.name in keys:
580
592
  variable.value = kwargs[variable.name]
581
-
dtlpy/entities/service.py CHANGED
@@ -567,14 +567,14 @@ class Service(entities.BaseEntity):
567
567
  """
568
568
  return self.services.update(service=self, force=force)
569
569
 
570
- def delete(self):
570
+ def delete(self, force: bool = False):
571
571
  """
572
572
  Delete Service object
573
573
 
574
574
  :return: True
575
575
  :rtype: bool
576
576
  """
577
- return self.services.delete(service_id=self.id)
577
+ return self.services.delete(service_id=self.id, force=force)
578
578
 
579
579
  def status(self):
580
580
  """
@@ -759,6 +759,32 @@ class Service(entities.BaseEntity):
759
759
  wait=wait)
760
760
  return execution
761
761
 
762
+ def rerun_batch(self,
763
+ filters,
764
+ wait=True
765
+ ):
766
+ """
767
+ rerun a executions on an existing service
768
+
769
+ **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a Filter.
770
+
771
+ :param filters: Filters entity for a filtering before rerun
772
+ :param bool wait: wait until create task finish
773
+ :return: rerun command
774
+ :rtype: dtlpy.entities.command.Command
775
+
776
+ **Example**:
777
+
778
+ .. code-block:: python
779
+
780
+ command = service.executions.rerun_batch(
781
+ filters=dl.Filters(field='id', values=['executionId'], operator=dl.FiltersOperations.IN, resource=dl.FiltersResource.EXECUTION))
782
+ """
783
+ execution = self.executions.rerun_batch(service_id=self.id,
784
+ filters=filters,
785
+ wait=wait)
786
+ return execution
787
+
762
788
  def activate_slots(
763
789
  self,
764
790
  project_id: str = None,
@@ -271,7 +271,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
271
271
  :param bool overwrite: overwrite the data path (download again). default is False
272
272
  """
273
273
  # define paths
274
- dataloop_path = os.path.join(os.path.expanduser('~'), '.dataloop')
274
+ dataloop_path = service_defaults.DATALOOP_PATH
275
275
  root_path = self.adapter_defaults.resolve("root_path", root_path)
276
276
  data_path = self.adapter_defaults.resolve("data_path", data_path)
277
277
  output_path = self.adapter_defaults.resolve("output_path", output_path)
dtlpy/new_instance.py CHANGED
@@ -13,7 +13,7 @@ class Dtlpy:
13
13
  Ontology, Label, Task, TaskPriority, ConsensusTaskType, Assignment, Service, Package, Codebase, Model,
14
14
  PackageModule, PackageFunction,
15
15
  # annotations
16
- Box, Cube, Cube3d, Point, Note, Message, Segmentation, Ellipse, Classification, Subtitle, Polyline, Pose,
16
+ Box, Cube, Cube3d, Point, Note, Message, Segmentation, Ellipse, Classification, Subtitle, Polyline, Pose, Gis, GisType,
17
17
  Description,
18
18
  Polygon, Text, FreeText, RefImage,
19
19
  # filters
@@ -38,7 +38,7 @@ class Dtlpy:
38
38
  Webhook, HttpMethod,
39
39
  ViewAnnotationOptions, AnnotationStatus, AnnotationType,
40
40
  ItemStatus, ExecutionStatus, ExportMetadata,
41
- PromptItem, Prompt, PromptType,
41
+ PromptItem, Prompt, PromptType, ItemGis, Layer,
42
42
  ItemLink, UrlLink, LinkTypeEnum,
43
43
  Modality, ModalityTypeEnum, ModalityRefTypeEnum,
44
44
  Workload, WorkloadUnit, ItemAction,
@@ -4,6 +4,9 @@ import logging
4
4
  import json
5
5
  import jwt
6
6
  import os
7
+ from PIL import Image
8
+ from io import BytesIO
9
+ import base64
7
10
 
8
11
  from .. import entities, exceptions, miscellaneous, _api_reference
9
12
  from ..services.api_client import ApiClient
@@ -309,7 +312,7 @@ class Annotations:
309
312
  """
310
313
  # get item's annotations
311
314
  annotations = self.list()
312
- if 'text' in self.item.metadata.get('system').get('mimetype', ''):
315
+ if 'text' in self.item.metadata.get('system').get('mimetype', '') or 'json' in self.item.metadata.get('system').get('mimetype', ''):
313
316
  annotation_format = entities.ViewAnnotationOptions.JSON
314
317
  elif 'audio' not in self.item.metadata.get('system').get('mimetype', ''):
315
318
  # height/weight
@@ -487,12 +490,9 @@ class Annotations:
487
490
  status = True
488
491
  result = w_annotation
489
492
  else:
490
- url_path = '/annotations/{}'.format(annotation_id)
491
- if system_metadata:
492
- url_path += '?system=true'
493
- suc, response = self._client_api.gen_request(req_type='put',
494
- path=url_path,
495
- json_req=json_req)
493
+ suc, response = self._update_annotation_req(annotation_json=json_req,
494
+ system_metadata=system_metadata,
495
+ annotation_id=annotation_id)
496
496
  if suc:
497
497
  result = entities.Annotation.from_json(_json=response.json(),
498
498
  annotations=self,
@@ -507,6 +507,15 @@ class Annotations:
507
507
  result = traceback.format_exc()
508
508
  return status, result
509
509
 
510
+ def _update_annotation_req(self, annotation_json, system_metadata, annotation_id):
511
+ url_path = '/annotations/{}'.format(annotation_id)
512
+ if system_metadata:
513
+ url_path += '?system=true'
514
+ suc, response = self._client_api.gen_request(req_type='put',
515
+ path=url_path,
516
+ json_req=annotation_json)
517
+ return suc, response
518
+
510
519
  @_api_reference.add(path='/annotations/{annotationId}', method='put')
511
520
  def update(self, annotations, system_metadata=False):
512
521
  """
@@ -572,11 +581,12 @@ class Annotations:
572
581
  last_frame = frame
573
582
  return annotation
574
583
 
575
- def _create_batches_for_upload(self, annotations):
584
+ def _create_batches_for_upload(self, annotations, merge=False):
576
585
  """
577
586
  receives a list of annotations and split them into batches to optimize the upload
578
587
 
579
588
  :param annotations: list of all annotations
589
+ :param merge: bool - merge the new binary annotations with the existing annotations
580
590
  :return: batch_annotations: list of list of annotation. each batch with size self._upload_batch_size
581
591
  """
582
592
  annotation_batches = list()
@@ -601,8 +611,107 @@ class Annotations:
601
611
  single_batch = list()
602
612
  if len(single_batch) > 0:
603
613
  annotation_batches.append(single_batch)
614
+ if merge and self.item:
615
+ annotation_batches = self._merge_new_annotations(annotation_batches)
616
+ annotation_batches = self._merge_to_exits_annotations(annotation_batches)
604
617
  return annotation_batches
605
618
 
619
+ def _merge_binary_annotations(self, data_url1, data_url2, item_width, item_height):
620
+ # Decode base64 data
621
+ img_data1 = base64.b64decode(data_url1.split(",")[1])
622
+ img_data2 = base64.b64decode(data_url2.split(",")[1])
623
+
624
+ # Convert binary data to images
625
+ img1 = Image.open(BytesIO(img_data1))
626
+ img2 = Image.open(BytesIO(img_data2))
627
+
628
+ # Create a new image with the target item size
629
+ merged_img = Image.new('RGBA', (item_width, item_height))
630
+
631
+ # Paste both images on the new canvas at their original sizes and positions
632
+ # Adjust positioning logic if needed (assuming top-left corner for both images here)
633
+ merged_img.paste(img1, (0, 0), img1) # Use img1 as a mask to handle transparency
634
+ merged_img.paste(img2, (0, 0), img2) # Overlay img2 at the same position
635
+
636
+ # Save the merged image to a buffer
637
+ buffer = BytesIO()
638
+ merged_img.save(buffer, format="PNG")
639
+ merged_img_data = buffer.getvalue()
640
+
641
+ # Encode the merged image back to a base64 string
642
+ merged_data_url = "data:image/png;base64," + base64.b64encode(merged_img_data).decode()
643
+
644
+ return merged_data_url
645
+
646
+ def _merge_new_annotations(self, annotations_batch):
647
+ """
648
+ Merge the new binary annotations with the existing annotations
649
+ :param annotations_batch: list of list of annotation. each batch with size self._upload_batch_size
650
+ :return: merged_annotations_batch: list of list of annotation. each batch with size self._upload_batch_size
651
+ """
652
+ for annotations in annotations_batch:
653
+ for annotation in annotations:
654
+ if annotation['type'] == 'binary' and not annotation.get('clean', False):
655
+ to_merge = [a for a in annotations if
656
+ not a.get('clean', False) and a.get("metadata", {}).get('system', {}).get('objectId',
657
+ None) ==
658
+ annotation.get("metadata", {}).get('system', {}).get('objectId', None) and a['label'] ==
659
+ annotation['label']]
660
+ if len(to_merge) == 0:
661
+ # no annotation to merge with
662
+ continue
663
+ for a in to_merge:
664
+ if a['coordinates'] == annotation['coordinates']:
665
+ continue
666
+ merged_data_url = self._merge_binary_annotations(a['coordinates'], annotation['coordinates'],
667
+ self.item.width, self.item.height)
668
+ annotation['coordinates'] = merged_data_url
669
+ a['clean'] = True
670
+ return [[a for a in annotations if not a.get('clean', False)] for annotations in annotations_batch]
671
+
672
+ def _merge_to_exits_annotations(self, annotations_batch):
673
+ filters = entities.Filters(resource=entities.FiltersResource.ANNOTATION, field='type', values='binary')
674
+ filters.add(field='itemId', values=self.item.id, method=entities.FiltersMethod.AND)
675
+ exist_annotations = self.list(filters=filters).annotations or list()
676
+ to_delete = list()
677
+ for annotations in annotations_batch:
678
+ for ann in annotations:
679
+ if ann['type'] == 'binary':
680
+ to_merge = [a for a in exist_annotations if
681
+ a.object_id == ann.get("metadata", {}).get('system', {}).get('objectId',
682
+ None) and a.label == ann[
683
+ 'label']]
684
+ if len(to_merge) == 0:
685
+ # no annotation to merge with
686
+ continue
687
+ if to_merge[0].coordinates == ann['coordinates']:
688
+ # same annotation
689
+ continue
690
+ if len(to_merge) > 1:
691
+ raise exceptions.PlatformException('400', 'Multiple annotations with the same label')
692
+ # merge
693
+ exist_annotations.remove(to_merge[0])
694
+ merged_data_url = self._merge_binary_annotations(to_merge[0].coordinates, ann['coordinates'],
695
+ self.item.width, self.item.height)
696
+ json_ann = to_merge[0].to_json()
697
+ json_ann['coordinates'] = merged_data_url
698
+ suc, response = self._update_annotation_req(annotation_json=json_ann,
699
+ system_metadata=True,
700
+ annotation_id=to_merge[0].id)
701
+ if not suc:
702
+ raise exceptions.PlatformException(response)
703
+ if suc:
704
+ result = entities.Annotation.from_json(_json=response.json(),
705
+ annotations=self,
706
+ dataset=self._dataset,
707
+ item=self._item)
708
+ exist_annotations.append(result)
709
+ to_delete.append(ann)
710
+ if len(to_delete) > 0:
711
+ annotations_batch = [[a for a in annotations if a not in to_delete] for annotations in annotations_batch]
712
+
713
+ return annotations_batch
714
+
606
715
  def _upload_single_batch(self, annotation_batch):
607
716
  try:
608
717
  suc, response = self._client_api.gen_request(req_type='post',
@@ -650,14 +759,15 @@ class Annotations:
650
759
  logger.info('Annotation/s uploaded successfully. num: {}'.format(len(uploaded_annotations)))
651
760
  return uploaded_annotations
652
761
 
653
- async def _async_upload_annotations(self, annotations):
762
+ async def _async_upload_annotations(self, annotations, merge=False):
654
763
  """
655
764
  Async function to run from the uploader. will use asyncio to not break the async
656
- :param annotations:
765
+ :param annotations: list of all annotations
766
+ :param merge: bool - merge the new binary annotations with the existing annotations
657
767
  :return:
658
768
  """
659
769
  async with self._client_api.event_loop.semaphore('annotations.upload'):
660
- annotation_batch = self._create_batches_for_upload(annotations=annotations)
770
+ annotation_batch = self._create_batches_for_upload(annotations=annotations, merge=merge)
661
771
  output_annotations = list()
662
772
  for annotations_list in annotation_batch:
663
773
  success, response = await self._client_api.gen_async_request(req_type='post',
@@ -679,7 +789,7 @@ class Annotations:
679
789
  return result
680
790
 
681
791
  @_api_reference.add(path='/items/{itemId}/annotations', method='post')
682
- def upload(self, annotations) -> entities.AnnotationCollection:
792
+ def upload(self, annotations, merge=False) -> entities.AnnotationCollection:
683
793
  """
684
794
  Upload a new annotation/annotations. You must first create the annotation using the annotation *builder* method.
685
795
 
@@ -687,6 +797,7 @@ class Annotations:
687
797
 
688
798
  :param List[dtlpy.entities.annotation.Annotation] or dtlpy.entities.annotation.Annotation annotations: list or
689
799
  single annotation of type Annotation
800
+ :param bool merge: optional - merge the new binary annotations with the existing annotations
690
801
  :return: list of annotation objects
691
802
  :rtype: entities.AnnotationCollection
692
803
 
@@ -718,7 +829,7 @@ class Annotations:
718
829
  logger.warning('Annotation upload receives 0 annotations. Not doing anything')
719
830
  out_annotations = list()
720
831
  else:
721
- annotation_batches = self._create_batches_for_upload(annotations=annotations)
832
+ annotation_batches = self._create_batches_for_upload(annotations=annotations, merge=merge)
722
833
  out_annotations = self._upload_annotations_batches(annotation_batches=annotation_batches)
723
834
  out_annotations = entities.AnnotationCollection.from_json(_json=out_annotations,
724
835
  item=self.item)
@@ -1,6 +1,8 @@
1
1
  import threading
2
2
  import logging
3
3
  import time
4
+ from copy import deepcopy
5
+
4
6
  import numpy as np
5
7
 
6
8
  from .. import exceptions, entities, repositories, miscellaneous, _api_reference
@@ -351,6 +353,58 @@ class Executions:
351
353
  command = command.wait(timeout=0)
352
354
  return command
353
355
 
356
+ @_api_reference.add(path='/executions/rerun', method='post')
357
+ def rerun_batch(self,
358
+ filters,
359
+ service_id: str = None,
360
+ wait=True
361
+ ):
362
+ """
363
+ rerun a executions on an existing service
364
+
365
+ **Prerequisites**: You must be in the role of an *owner* or *developer*. You must have a Filter.
366
+
367
+ :param filters: Filters entity for a filtering before rerun
368
+ :param str service_id: service id to rerun on
369
+ :param bool wait: wait until create task finish
370
+ :return: rerun command
371
+ :rtype: dtlpy.entities.command.Command
372
+
373
+ **Example**:
374
+
375
+ .. code-block:: python
376
+
377
+ command = service.executions.rerun_batch(
378
+ filters=dl.Filters(field='id', values=['executionId'], operator=dl.FiltersOperations.IN, resource=dl.FiltersResource.EXECUTION))
379
+ """
380
+ url_path = '/executions/rerun'
381
+
382
+ if filters is None:
383
+ raise exceptions.PlatformException('400', 'Please provide filter')
384
+
385
+ if filters.resource != entities.FiltersResource.EXECUTION:
386
+ raise exceptions.PlatformException(
387
+ error='400',
388
+ message='Filters resource must to be FiltersResource.EXECUTION. Got: {!r}'.format(filters.resource))
389
+
390
+ if service_id is not None and not filters.has_field('serviceId'):
391
+ filters = deepcopy(filters)
392
+ filters.add(field='serviceId', values=service_id, method=entities.FiltersMethod.AND)
393
+
394
+ success, response = self._client_api.gen_request(req_type='post',
395
+ path=url_path,
396
+ json_req={'query': filters.prepare()['filter']})
397
+ # exception handling
398
+ if not success:
399
+ raise exceptions.PlatformException(response)
400
+
401
+ response_json = response.json()
402
+ command = entities.Command.from_json(_json=response_json,
403
+ client_api=self._client_api)
404
+ if wait:
405
+ command = command.wait(timeout=0)
406
+ return command
407
+
354
408
  def _list(self, filters: entities.Filters):
355
409
  """
356
410
  List service executions
@@ -465,6 +465,11 @@ class Items:
465
465
  raise exceptions.PlatformException('400',
466
466
  'Must provide update_values or system_update_values')
467
467
 
468
+ if item is not None and value_to_update:
469
+ raise exceptions.PlatformException('400',
470
+ 'Cannot provide "update_values" or "system_update_values" with a specific "item" for an individual update. '
471
+ 'These parameters are intended only for bulk updates using filters.')
472
+
468
473
  # update item
469
474
  if item is not None:
470
475
  json_req = miscellaneous.DictDiffer.diff(origin=item._platform_dict,
@@ -638,11 +643,6 @@ class Items:
638
643
  item_metadata={'Hellow': 'Word'}
639
644
  )
640
645
  """
641
- # fix remote path
642
- if remote_path is not None:
643
- if not remote_path.startswith('/'):
644
- remote_path = '/' + remote_path
645
-
646
646
  # initiate and use uploader
647
647
  uploader = repositories.Uploader(items_repository=self, output_entity=output_entity, no_output=no_output)
648
648
  return uploader.upload(
@@ -38,13 +38,13 @@ class Messages:
38
38
 
39
39
  messages = dl.messages.list(context={project: id})
40
40
  """
41
- user = self._client_api.info()['user_email']
42
41
 
42
+ user = self._client_api.info()['user_email']
43
43
  if not user:
44
44
  raise exceptions.PlatformException(error='400',
45
45
  message='No user in JWT, please login')
46
46
 
47
- url = '/inbox/message/user/{}'.format(user)
47
+ url = '/inbox/message/user'
48
48
 
49
49
  query_params = {
50
50
  'newOnly': new_only,
@@ -194,16 +194,21 @@ class PipelineExecutions:
194
194
  return paged
195
195
 
196
196
  @_api_reference.add(path='/pipelines/{pipelineId}/execute', method='post')
197
- def create(self,
198
- pipeline_id: str = None,
199
- execution_input=None):
197
+ def create(
198
+ self,
199
+ pipeline_id: str = None,
200
+ execution_input=None,
201
+ node_id: str = None
202
+ ):
200
203
  """
201
- Execute a pipeline and return the execute.
204
+ Execute a pipeline.
202
205
 
203
206
  **prerequisites**: You must be an *owner* or *developer* to use this method.
204
207
 
205
208
  :param pipeline_id: pipeline id
206
209
  :param execution_input: list of the dl.FunctionIO or dict of pipeline input - example {'item': 'item_id'}
210
+ :param node_id: node id to start from
211
+
207
212
  :return: entities.PipelineExecution object
208
213
  :rtype: dtlpy.entities.pipeline_execution.PipelineExecution
209
214
 
@@ -234,6 +239,9 @@ class PipelineExecutions:
234
239
  else:
235
240
  raise exceptions.PlatformException('400', 'Unknown input type')
236
241
 
242
+ if node_id is not None:
243
+ payload['nodeId'] = node_id
244
+
237
245
  success, response = self._client_api.gen_request(
238
246
  path='/pipelines/{}/execute'.format(pipeline_id),
239
247
  req_type='POST',
@@ -249,13 +257,16 @@ class PipelineExecutions:
249
257
  return execution
250
258
 
251
259
  @_api_reference.add(path='/pipelines/{pipelineId}/execute', method='post')
252
- def create_batch(self,
253
- pipeline_id: str,
254
- filters,
255
- execution_inputs=None,
256
- wait=True):
260
+ def create_batch(
261
+ self,
262
+ pipeline_id: str,
263
+ filters,
264
+ execution_inputs=None,
265
+ wait=True,
266
+ node_id: str = None
267
+ ):
257
268
  """
258
- Execute a pipeline and return the execute.
269
+ Create batch executions of a pipeline.
259
270
 
260
271
  **prerequisites**: You must be an *owner* or *developer* to use this method.
261
272
 
@@ -301,6 +312,9 @@ class PipelineExecutions:
301
312
  payload['batch']['query'] = filters.prepare()
302
313
  payload['batch']['args'] = extra_input
303
314
 
315
+ if node_id is not None:
316
+ payload['nodeId'] = node_id
317
+
304
318
  success, response = self._client_api.gen_request(
305
319
  path='/pipelines/{}/execute'.format(pipeline_id),
306
320
  req_type='POST',
@@ -1069,6 +1069,8 @@ class Services:
1069
1069
 
1070
1070
  if project_id is not None:
1071
1071
  payload['projectId'] = project_id
1072
+ else:
1073
+ payload['projectId'] = service.project_id
1072
1074
 
1073
1075
  # request
1074
1076
  success, response = self._client_api.gen_request(req_type='post',
@@ -642,10 +642,19 @@ class Tasks:
642
642
  if filters is None and items is None:
643
643
  query = entities.Filters().prepare()
644
644
  elif filters is None:
645
- if not isinstance(items, list):
646
- items = [items]
645
+ item_list = list()
646
+ if isinstance(items, entities.PagedEntities):
647
+ for page in items:
648
+ for item in page:
649
+ item_list.append(item)
650
+ elif isinstance(items, list):
651
+ item_list = items
652
+ elif isinstance(items, entities.Item):
653
+ item_list.append(items)
654
+ else:
655
+ raise exceptions.PlatformException('400', 'Unknown items type')
647
656
  query = entities.Filters(field='id',
648
- values=[item.id for item in items],
657
+ values=[item.id for item in item_list],
649
658
  operator=entities.FiltersOperations.IN,
650
659
  use_defaults=False).prepare()
651
660
  else:
@@ -133,11 +133,13 @@ class Uploader:
133
133
  item_metadata,
134
134
  export_version: str = entities.ExportVersion.V1,
135
135
  item_description=None):
136
-
136
+ # fix remote path
137
137
  if remote_path is None:
138
- remote_path = '/'
138
+ remote_path = "/"
139
+ if not remote_path.startswith('/'):
140
+ remote_path = f"/{remote_path}"
139
141
  if not remote_path.endswith("/"):
140
- remote_path += "/"
142
+ remote_path = f"{remote_path}/"
141
143
  if file_types is not None and not isinstance(file_types, list):
142
144
  msg = '"file_types" should be a list of file extension. e.g [".jpg", ".png"]'
143
145
  raise PlatformException(error="400", message=msg)
@@ -279,6 +281,12 @@ class Uploader:
279
281
  elif isinstance(upload_item_element, entities.PromptItem):
280
282
  upload_elem = upload_element.PromptUploadElement(all_upload_elements=all_upload_elements)
281
283
 
284
+ elif isinstance(upload_item_element, entities.ItemGis):
285
+ buffer = io.BytesIO(json.dumps(upload_item_element.to_json()).encode('utf-8'))
286
+ buffer.name = upload_item_element.name
287
+ all_upload_elements['upload_item_element'] = buffer
288
+ upload_elem = upload_element.BinaryUploadElement(all_upload_elements=all_upload_elements)
289
+
282
290
  elif isinstance(upload_item_element, bytes) or \
283
291
  isinstance(upload_item_element, io.BytesIO) or \
284
292
  isinstance(upload_item_element, io.BufferedReader) or \
@@ -451,7 +459,7 @@ class Uploader:
451
459
  return item, response.headers.get('x-item-op', 'na')
452
460
 
453
461
  async def __upload_single_item_wrapper(self, element, pbar, reporter, mode):
454
- async with self.items_repository._client_api.event_loop.semaphore('items.upload'):
462
+ async with self.items_repository._client_api.event_loop.semaphore('items.upload', 5):
455
463
  # assert isinstance(element, UploadElement)
456
464
  item = False
457
465
  err = None
@@ -74,8 +74,6 @@ class PlatformError(Exception):
74
74
  super().__init__(msg)
75
75
 
76
76
 
77
-
78
-
79
77
  class Callbacks:
80
78
  def __init__(self):
81
79
  self._callbacks = {}
@@ -1172,7 +1170,12 @@ class ApiClient:
1172
1170
  def callback(bytes_read):
1173
1171
  pass
1174
1172
 
1175
- timeout = aiohttp.ClientTimeout(total=2 * 60)
1173
+ timeout = aiohttp.ClientTimeout(
1174
+ total=None, # Disable overall timeout
1175
+ connect=2 * 60, # Set connect timeout (in seconds)
1176
+ sock_read=10 * 60, # Set read timeout for socket read operations
1177
+ sock_connect=2 * 60 # Set timeout for connection setup
1178
+ )
1176
1179
  async with aiohttp.ClientSession(headers=headers, timeout=timeout) as session:
1177
1180
  try:
1178
1181
  form = aiohttp.FormData({})
@@ -1634,6 +1637,30 @@ class ApiClient:
1634
1637
  self._send_login_event(user_type='human', login_type='refresh')
1635
1638
  return res
1636
1639
 
1640
+ def generate_api_key(self, description: str = None, login: bool = False):
1641
+ """
1642
+ Generate an API key for a user
1643
+ :param description: description for the API key
1644
+ :param login: if True, login with the new API key
1645
+ :return: User token
1646
+ """
1647
+ user_email = self.info()['user_email']
1648
+ payload = {
1649
+ 'userId': user_email
1650
+ }
1651
+ if description:
1652
+ if not isinstance(description, str):
1653
+ raise ValueError('description should be a string')
1654
+ payload['description'] = description
1655
+ success, response = self.gen_request(req_type='post', path='/apiKeys', json_req=payload)
1656
+ if not success:
1657
+ raise exceptions.PlatformException(response)
1658
+ if login:
1659
+ self.login_api_key(response.json()['jwt'])
1660
+ return True
1661
+
1662
+ return response.json()['jwt']
1663
+
1637
1664
  def _renew_token_with_refresh_token(self):
1638
1665
  renewed = False
1639
1666
  if self.refresh_token_active is False:
@@ -1668,10 +1695,13 @@ class ApiClient:
1668
1695
  'refresh_token': refresh_token
1669
1696
  }
1670
1697
  logger.debug("RefreshToken: Refreshing token via {}".format(token_endpoint))
1671
- resp = requests.request("POST",
1672
- token_endpoint,
1673
- json=payload,
1674
- headers={'content-type': 'application/json'})
1698
+ resp = requests.request(
1699
+ "POST",
1700
+ token_endpoint,
1701
+ json=payload,
1702
+ headers={'content-type': 'application/json'},
1703
+ verify=self.verify
1704
+ )
1675
1705
  if not resp.ok:
1676
1706
  logger.debug('RefreshToken: Failed')
1677
1707
  self.print_bad_response(resp)
@@ -33,9 +33,11 @@ class AsyncThreadEventLoop(threading.Thread):
33
33
  self.loop.run_forever()
34
34
  logger.debug('Ended event loop with bounded semaphore to {}'.format(self.n))
35
35
 
36
- def semaphore(self, name):
36
+ def semaphore(self, name, n=None):
37
+ if n is None:
38
+ n = self.n
37
39
  if name not in self._semaphores:
38
- self._semaphores[name] = asyncio.BoundedSemaphore(self.n)
40
+ self._semaphores[name] = asyncio.BoundedSemaphore(n)
39
41
  return self._semaphores[name]
40
42
 
41
43
  def stop(self):
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dtlpy
3
- Version: 1.95.6
3
+ Version: 1.98.8
4
4
  Summary: SDK and CLI for Dataloop platform
5
5
  Home-page: https://github.com/dataloop-ai/dtlpy
6
6
  Author: Dataloop Team
@@ -1,7 +1,7 @@
1
- dtlpy/__init__.py,sha256=nE2SN0AD2rZ_ekF_kD7OzZbSE32H8zV5UM6t_E0LzTw,20647
2
- dtlpy/__version__.py,sha256=3OtkFGDOCL5-ZRo52dswwfcY5vYhh114MOzr2lJhEAA,19
1
+ dtlpy/__init__.py,sha256=fZYNrXe_suwThe-iSg5z45dkhTqiHe7btaxjhITesXE,20677
2
+ dtlpy/__version__.py,sha256=VyHfNvXyywstm8UJU4FD18Pn8gxwDCQgKYhzxKXGUUY,19
3
3
  dtlpy/exceptions.py,sha256=EQCKs3pwhwZhgMByQN3D3LpWpdxwcKPEEt-bIaDwURM,2871
4
- dtlpy/new_instance.py,sha256=ORhXmIsc8Kut2M1jekKL3dG_adRp7axK-25B4zJNqMU,10091
4
+ dtlpy/new_instance.py,sha256=u_c6JtgqsKCr7TU24-g7_CaST9ghqamMhM4Z0Zxt50w,10121
5
5
  dtlpy/assets/__init__.py,sha256=D_hAa6NM8Zoy32sF_9b7m0b7I-BQEyBFg8-9Tg2WOeo,976
6
6
  dtlpy/assets/lock_open.png,sha256=BH9uyf5uYvgZrDpDw9qCUnT3UbkXG8XbeRmWDpWlV4M,18215
7
7
  dtlpy/assets/main.py,sha256=N1JUsx79qnXI7Hx22C8JOzHJdGHxvrXeTx5UZAxvJfE,1380
@@ -44,11 +44,11 @@ dtlpy/dlp/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
44
44
  dtlpy/dlp/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
45
45
  dtlpy/dlp/dlp.py,sha256=YjNBjeCDTXJ7tj8qdiGZ8lFb8DtPZl-FvViyjxt9xF8,4278
46
46
  dtlpy/dlp/parser.py,sha256=p-TFaiAU2c3QkI97TXzL2LDR3Eq0hGDFrTc9J2jWLh4,30551
47
- dtlpy/entities/__init__.py,sha256=R2kDC9VHOeRSTgXXqNowbf_yZwy7tbAkukvIlPZmPVE,4856
47
+ dtlpy/entities/__init__.py,sha256=Sihb652vYCyCWSQfiYTEGQW0EJsHe7BTk6-S5DOsAb0,4908
48
48
  dtlpy/entities/analytic.py,sha256=5MpYDKPVsZ1MIy20Ju515RWed6P667j4TLxsan2gyNM,11925
49
- dtlpy/entities/annotation.py,sha256=yk-JQzgzXvnDLFrOkmcHQfEtsiPqZeIisv80ksNB-f8,66912
49
+ dtlpy/entities/annotation.py,sha256=sqgnONKbNb9gmPLJBH3mYJhFWeKjoiJ8dt57Cz7C6YA,67020
50
50
  dtlpy/entities/annotation_collection.py,sha256=CEYSBHhhDkC0VJdHsBSrA6TgdKGMcKeI3tFM40UJwS8,29838
51
- dtlpy/entities/app.py,sha256=VA1Sex80H9ebeYbjUYUbvHoyn3a-uqMtGkSauPsn1VM,6957
51
+ dtlpy/entities/app.py,sha256=LQ0cDuqeNtO7iihrTq8MBLrFbZiDVs6AFoAi4UQtUZY,6958
52
52
  dtlpy/entities/app_module.py,sha256=0UiAbBX1q8iEImi3nY7ySWZZHoRRwu0qUXmyXmgVAc4,3645
53
53
  dtlpy/entities/artifact.py,sha256=wtLtBuidOPbnba0ok40JyunCCIBGbAl4bP_ebK39Kk4,5711
54
54
  dtlpy/entities/assignment.py,sha256=Dc1QcfVf67GGcmDDi4ubESDuPkSgjXqdqjTBQ31faUM,14722
@@ -64,13 +64,14 @@ dtlpy/entities/driver.py,sha256=O_QdK1EaLjQyQkmvKsmkNgmvmMb1mPjKnJGxK43KrOA,7197
64
64
  dtlpy/entities/execution.py,sha256=WBiAws-6wZnQQ3y9wyvOeexA3OjxfaRdwDu5dSFYL1g,13420
65
65
  dtlpy/entities/feature.py,sha256=9fFjD0W57anOVSAVU55ypxN_WTCsWTG03Wkc3cAAj78,3732
66
66
  dtlpy/entities/feature_set.py,sha256=niw4MkmrDbD_LWQu1X30uE6U4DCzmFhPTaYeZ6VZDB0,4443
67
- dtlpy/entities/filters.py,sha256=tA-A0dS8nhMbnkHIo-INK6UuKzEPMyCdTs51K1-Vl9Y,22441
67
+ dtlpy/entities/filters.py,sha256=_A7rLc0yuMw1eW4gSu4-hogQzXbNUheRvFxnTQWntuo,22671
68
+ dtlpy/entities/gis_item.py,sha256=Uk-wMBxwcHsImjz4qOjP-EyZAohbRzN43kMpCaVjCXU,3982
68
69
  dtlpy/entities/integration.py,sha256=CA5F1eQCGE_4c_Kry4nWRdeyjHctNnvexcDXg_M5HLU,5734
69
70
  dtlpy/entities/item.py,sha256=G6VVcVCudqeShWigZmNIuKD4OkvTRJ05CeXFXNe3Jk8,29691
70
71
  dtlpy/entities/label.py,sha256=ycDYavIgKhz806plIX-64c07_TeHpDa-V7LnfFVe4Rg,3869
71
72
  dtlpy/entities/links.py,sha256=FAmEwHtsrqKet3c0UHH9u_gHgG6_OwF1-rl4xK7guME,2516
72
73
  dtlpy/entities/message.py,sha256=ApJuaKEqxATpXjNYUjGdYPu3ibQzEMo8-LtJ_4xAcPI,5865
73
- dtlpy/entities/model.py,sha256=LEot0PHOxPSeK9SCzAT6pofbLrbfybFYmr6v9YsiuB4,24927
74
+ dtlpy/entities/model.py,sha256=UKtai_V8ckTNPlhzflmJNHXJvH6BH9UYOwCMWXNZueU,24822
74
75
  dtlpy/entities/node.py,sha256=yPPYDLtNMc6vZbbf4FIffY86y7tkaTvYm42Jb7k3Ofk,39617
75
76
  dtlpy/entities/ontology.py,sha256=ok4p3sLBc_SS5hs2gZr5-gbblrveM7qSIX4z67QSKeQ,31967
76
77
  dtlpy/entities/organization.py,sha256=AMkx8hNIIIjnu5pYlNjckMRuKt6H3lnOAqtEynkr7wg,9893
@@ -80,21 +81,21 @@ dtlpy/entities/package_function.py,sha256=M42Kvw9A8b6msAkv-wRNAQg_-UC2bejniCjeKD
80
81
  dtlpy/entities/package_module.py,sha256=cOkIITATkzzCQpE0sdPiBUisAz8ImlPG2YGZ0K7SypA,5151
81
82
  dtlpy/entities/package_slot.py,sha256=XBwCodQe618sQm0bmx46Npo94mEk-zUV7ZX0mDRcsD8,3946
82
83
  dtlpy/entities/paged_entities.py,sha256=6y44H3FSclQvhB1KLI4zuIs317hWOhdHUynldRrUJkE,5913
83
- dtlpy/entities/pipeline.py,sha256=OrRybxEa29S4sKtl7RTdf6kRgnQi90n4wlN4OsMJJLk,20671
84
+ dtlpy/entities/pipeline.py,sha256=X9238WbMGfZcXdQVEtkw8twZwl0O4EZB4TxbTSEyPeI,20788
84
85
  dtlpy/entities/pipeline_execution.py,sha256=XCXlBAHFYVL2HajE71hK-bPxI4gTwZvg5SKri4BgyRA,9928
85
86
  dtlpy/entities/project.py,sha256=ZUx8zA3mr6N145M62R3UDPCCzO1vxfyWO6vjES-bO-g,14653
86
87
  dtlpy/entities/prompt_item.py,sha256=Kmvguz3f0sGtkKZS9OEA_-Yi4aQRCgdg1GBkaLQyyTg,19592
87
88
  dtlpy/entities/recipe.py,sha256=Q1HtYgind3bEe-vnDZWhw6H-rcIAGhkGHPRWtLIkPSE,11917
88
89
  dtlpy/entities/reflect_dict.py,sha256=2NaSAL-CO0T0FYRYFQlaSpbsoLT2Q18AqdHgQSLX5Y4,3273
89
90
  dtlpy/entities/resource_execution.py,sha256=1HuVV__U4jAUOtOkWlWImnM3Yts8qxMSAkMA9sBhArY,5033
90
- dtlpy/entities/service.py,sha256=ZV3HhBbafs0N_lSIWxu4CNJ39WThd7z5GAd0fCvSnFg,32462
91
+ dtlpy/entities/service.py,sha256=3A_kcEUCbaS-Qx31rfNyThYK7OxUrzHiE6shT0Oxh60,33467
91
92
  dtlpy/entities/setting.py,sha256=uXagJHtcCR3nJYClR_AUGZjz_kx3TejPcUZ8ginHFIA,8561
92
93
  dtlpy/entities/task.py,sha256=XHiEqZYFlrDCtmw1MXsysjoBLdIzAk7coMrVk8bNIiE,19534
93
94
  dtlpy/entities/time_series.py,sha256=336jWNckjuSn0G29WJFetB7nBoFAKqs4VH9_IB4m4FE,4017
94
95
  dtlpy/entities/trigger.py,sha256=zh3wYUY2-zATh_7ous0Ck87Yojo9r9PAVQrkcESxoko,14266
95
96
  dtlpy/entities/user.py,sha256=hqEzwN6rl1oUTpKOV5eXvw9Z7dtpsiC4TAPSNBmkqcM,3865
96
97
  dtlpy/entities/webhook.py,sha256=6R06MgLxabvKySInGlSJmaf0AVmAMe3vKusWhqONRyU,3539
97
- dtlpy/entities/annotation_definitions/__init__.py,sha256=Y_L9JGbRoQQKkoPMCUnLotbytgYK6244WtR8sySmINQ,636
98
+ dtlpy/entities/annotation_definitions/__init__.py,sha256=qZ77hGmCQopPSpiDHYhNWbNKC7nrn10NWNlim9dINmg,666
98
99
  dtlpy/entities/annotation_definitions/base_annotation_definition.py,sha256=BXlTElzhjQ4zVwwz4i2XchDzXMLKsz_P3gwyQlsFdBM,2524
99
100
  dtlpy/entities/annotation_definitions/box.py,sha256=kNT_Ba7QWKBiyt1uPAmYLyBfPsxvIUNLhVe9042WFnM,8622
100
101
  dtlpy/entities/annotation_definitions/classification.py,sha256=uqLAAaqNww2ZwR1e4UW22foJtDxoeZXJsv5PTvyt-tA,1559
@@ -104,6 +105,7 @@ dtlpy/entities/annotation_definitions/cube_3d.py,sha256=PTCXoyVfBo3YrCiMDqcBMuTc
104
105
  dtlpy/entities/annotation_definitions/description.py,sha256=2Gh9zF-a2EsLKf4-E3LcftAo2sZ0z-zBGMi5ZnhMY1s,919
105
106
  dtlpy/entities/annotation_definitions/ellipse.py,sha256=J9jEc3xke7XD5X_SdiSEsdDIOfxIN0KnInQ0P0J8QrI,4054
106
107
  dtlpy/entities/annotation_definitions/free_text.py,sha256=KVESLAEXoHe1StA99ri-hYCjoIiSMLCkpKbdGFm_DFg,1631
108
+ dtlpy/entities/annotation_definitions/gis.py,sha256=D6eb5Hr5yvKvbi0Dnq4YGc6uytGuVk9Y1jHpAg5QCEg,2079
107
109
  dtlpy/entities/annotation_definitions/note.py,sha256=4jHJhC4e2peZ5LSb7DdU50XqBYR9R6ytyHG2vV2mx98,4105
108
110
  dtlpy/entities/annotation_definitions/point.py,sha256=IkrFlTAIPk8dz_Qkr65L-arLTlkIA3-cebrKcwDbhaM,3271
109
111
  dtlpy/entities/annotation_definitions/polygon.py,sha256=gI28yzvYgDj_js6bU9djAzswXwOBdc5GiauAnJa0M78,6346
@@ -146,14 +148,14 @@ dtlpy/miscellaneous/list_print.py,sha256=leEg3RodgYfH5t_0JG8VuM8NiesR8sJLK_mRStt
146
148
  dtlpy/miscellaneous/zipping.py,sha256=GMdPhAeHQXeMS5ClaiKWMJWVYQLBLAaJUWxvdYrL4Ro,5337
147
149
  dtlpy/ml/__init__.py,sha256=vPkyXpc9kcWWZ_PxyPEOsjKBJdEbowLkZr8FZIb_OBM,799
148
150
  dtlpy/ml/base_feature_extractor_adapter.py,sha256=iiEGYAx0Rdn4K46H_FlKrAv3ebTXHSxNVAmio0BxhaI,1178
149
- dtlpy/ml/base_model_adapter.py,sha256=mcq_1ELAcJ6xzqYg_U0E3rOD-rJumgSu8YeSL9R7czc,50901
151
+ dtlpy/ml/base_model_adapter.py,sha256=DCf9anJLrxh-kBr1WdgXtx84oqBlAue87QV_1V-260E,50881
150
152
  dtlpy/ml/metrics.py,sha256=BG2E-1Mvjv2e2No9mIJKVmvzqBvLqytKcw3hA7wVUNc,20037
151
153
  dtlpy/ml/predictions_utils.py,sha256=He_84U14oS2Ss7T_-Zj5GDiBZwS-GjMPURUh7u7DjF8,12484
152
154
  dtlpy/ml/summary_writer.py,sha256=dehDi8zmGC1sAGyy_3cpSWGXoGQSiQd7bL_Thoo8yIs,2784
153
155
  dtlpy/ml/train_utils.py,sha256=R-BHKRfqDoLLhFyLzsRFyJ4E-8iedj9s9oZqy3IO2rg,2404
154
156
  dtlpy/repositories/__init__.py,sha256=aBWg6mayTAy6CtfSPLxyT_Uae7hQyNTILI7sRLKNEPU,1996
155
157
  dtlpy/repositories/analytics.py,sha256=dQPCYTPAIuyfVI_ppR49W7_GBj0033feIm9Gd7LW1V0,2966
156
- dtlpy/repositories/annotations.py,sha256=E7iHo8UwDAhdulqh0lGr3fGQ-TSwZXXGsEXZA-WJ_NA,35780
158
+ dtlpy/repositories/annotations.py,sha256=b6Y9K9Yj_EaavMMrdtDG0QfhsLpz0lYpwMecTaNPmG4,42453
157
159
  dtlpy/repositories/apps.py,sha256=J-PDCPWVtvTLmzzkABs2-8zo9hGLk_z_sNR2JB1mB0c,15752
158
160
  dtlpy/repositories/artifacts.py,sha256=Ke2ustTNw-1eQ0onLsWY7gL2aChjXPAX5p1uQ_EzMbo,19081
159
161
  dtlpy/repositories/assignments.py,sha256=1VwJZ7ctQe1iaDDDpeYDgoj2G-TCgzolVLUEqUocd2w,25506
@@ -166,36 +168,36 @@ dtlpy/repositories/datasets.py,sha256=rDpJXNyxOlJwDQB-wNkM-JIqOGH10q9nujnAl6y8_x
166
168
  dtlpy/repositories/downloader.py,sha256=pNwL7Nid8xmOyYNiv4DB_WY4RoKlxQ-U9nG2V99Gyr8,41342
167
169
  dtlpy/repositories/dpks.py,sha256=mj3QPvfzj_jZAscwIgpKUfa7fLxptc3OJQ_RrSfgYxo,17487
168
170
  dtlpy/repositories/drivers.py,sha256=fF0UuHCyBzop8pHfryex23mf0kVFAkqzNdOmwBbaWxY,10204
169
- dtlpy/repositories/executions.py,sha256=M84nhpFPPZq4fQeJ2m_sv6JT4NE2WDRMOXWr451J0bU,30403
171
+ dtlpy/repositories/executions.py,sha256=4UoU6bnB3kl5cMuF1eJvDecfZCaB06gKWxPfv6_g1_k,32598
170
172
  dtlpy/repositories/feature_sets.py,sha256=UowMDAl_CRefRB5oZzubnsjU_OFgiPPdQXn8q2j4Kuw,9666
171
173
  dtlpy/repositories/features.py,sha256=7xA2ihEuNgZD7HBQMMGLWpsS2V_3PgieKW2YAk1OeUU,9712
172
174
  dtlpy/repositories/integrations.py,sha256=Wi-CpT2PH36GFu3znWP5Uf2CmkqWBUYyOdwvatGD_eM,11798
173
- dtlpy/repositories/items.py,sha256=DqJ3g9bc4OLMm9KqI-OebXbr-zcEiohO1wGZJ1uE2Lg,37874
174
- dtlpy/repositories/messages.py,sha256=zYcoz8Us6j8Tb5Z7luJuvtO9xSRTuOCS7pl-ztt97Ac,3082
175
+ dtlpy/repositories/items.py,sha256=90Z8-thLWBd49fmmnP-P6pZxhHX1k4Wv6Qfxq-Ovcz4,38092
176
+ dtlpy/repositories/messages.py,sha256=QU0Psckg6CA_Tlw9AVxqa-Ay1fRM4n269sSIJkH9o7E,3066
175
177
  dtlpy/repositories/models.py,sha256=GdVWHJ6kOIxM01wH7RVQ3CVaR4OmGurWJdQVHZezLDM,34789
176
178
  dtlpy/repositories/nodes.py,sha256=xXJm_YA0vDUn0dVvaGeq6ORM0vI3YXvfjuylvGRtkxo,3061
177
179
  dtlpy/repositories/ontologies.py,sha256=unnMhD2isR9DVE5S8Fg6fSDf1ZZ5Xemxxufx4LEUT3w,19577
178
180
  dtlpy/repositories/organizations.py,sha256=6ijUDFbsogfRul1g_vUB5AZOb41MRmV5NhNU7WLHt3A,22825
179
181
  dtlpy/repositories/packages.py,sha256=QhkXMZkpseCt0pDropJuqoHJL0RMa5plk8AN0V3w6Nk,86807
180
- dtlpy/repositories/pipeline_executions.py,sha256=zQNRejj23r5q1cSp88KMoeOGUOUbbg3Yi-ER7qZfyF8,16781
182
+ dtlpy/repositories/pipeline_executions.py,sha256=hJX2I939c-bWxveVdikZ_9LWMNCQusTRkkdEa5j3Yvo,17007
181
183
  dtlpy/repositories/pipelines.py,sha256=VDAOsGbgD1_AKdMrJl_qB3gxPs7f3pwUnPx0pT1iAWk,23977
182
184
  dtlpy/repositories/projects.py,sha256=tZyFLqVs-8ggTIi5echlX7XdGOJGW4LzKuXke7jkRnw,22140
183
185
  dtlpy/repositories/recipes.py,sha256=ZZDhHn9g28C99bsf0nFaIpVYn6f6Jisz9upkHEkeaYY,15843
184
186
  dtlpy/repositories/resource_executions.py,sha256=PyzsbdJxz6jf17Gx13GZmqdu6tZo3TTVv-DypnJ_sY0,5374
185
187
  dtlpy/repositories/schema.py,sha256=kTKDrbwm7BfQnBAK81LpAl9ChNFdyUweSLNazlJJhjk,3953
186
- dtlpy/repositories/services.py,sha256=8hu6CgIyGQHOOlBmZmmM-oY8i-adU_99lSN46FGvvkc,67421
188
+ dtlpy/repositories/services.py,sha256=kG2CW4xKKO5mbFv93Km-6gxq-Zq6-6GToEtQQ0R-KWA,67489
187
189
  dtlpy/repositories/settings.py,sha256=pvqNse0ANCdU3NSLJEzHco-PZq__OIsPSPVJveB9E4I,12296
188
- dtlpy/repositories/tasks.py,sha256=nA3rODvS8Q361xDmPXII-VPzktzoxbAApxTkzC5wv4M,48601
190
+ dtlpy/repositories/tasks.py,sha256=v09S2pYGkKx_vBG7SWigJeuMhp0GsefKo3Td7ImrWb0,49039
189
191
  dtlpy/repositories/times_series.py,sha256=m-bKFEgiZ13yQNelDjBfeXMUy_HgsPD_JAHj1GVx9fU,11420
190
192
  dtlpy/repositories/triggers.py,sha256=izdNyCN1gDc5uo7AXntso0HSMTDIzGFUp-dSEz8cn_U,21990
191
193
  dtlpy/repositories/upload_element.py,sha256=4CDZRKLubanOP0ZyGwxAHTtl6GLzwAyRAIm-PLYt0ck,10140
192
- dtlpy/repositories/uploader.py,sha256=iOlDYWIMy_h1Rbd7Mfug1I0e93dBJ0SxqP_BOwqYQPQ,30697
194
+ dtlpy/repositories/uploader.py,sha256=SW3mJHFgd5JgYUhwTwm63xXFQ8DB97-bIzc3Fk9BYMU,31219
193
195
  dtlpy/repositories/webhooks.py,sha256=IIpxOJ-7KeQp1TY9aJZz-FuycSjAoYx0TDk8z86KAK8,9033
194
196
  dtlpy/services/__init__.py,sha256=VfVJy2otIrDra6i7Sepjyez2ujiE6171ChQZp-YgxsM,904
195
197
  dtlpy/services/aihttp_retry.py,sha256=tgntZsAY0dW9v08rkjX1T5BLNDdDd8svtgn7nH8DSGU,5022
196
- dtlpy/services/api_client.py,sha256=DBelaW5qpZoX7vQXjgLL2xTcTwUqJodZ901g0C3Panw,68331
198
+ dtlpy/services/api_client.py,sha256=DRGSi2gTbgLh_LR0vhwWh3f-tYuJql6VKL58Ov1Iqug,69478
197
199
  dtlpy/services/api_reference.py,sha256=cW-B3eoi9Xs3AwI87_Kr6GV_E6HPoC73aETFaGz3A-0,1515
198
- dtlpy/services/async_utils.py,sha256=lfpkTkRUvQoMTxaRZBHbPt5e43qdvpCGDe_-KcY2Jps,2810
200
+ dtlpy/services/async_utils.py,sha256=bVz7PLCpnldyQXMKTPahnQqAudxNW1-c71nfMjcI41Q,2858
199
201
  dtlpy/services/calls_counter.py,sha256=gr0io5rIsO5-7Cgc8neA1vK8kUtYhgFPmDQ2jXtiZZs,1036
200
202
  dtlpy/services/check_sdk.py,sha256=tnFWCzkJa8w2jLtw-guwuqpOtXGyiVU7ZCDFiUZUqzY,3593
201
203
  dtlpy/services/cookie.py,sha256=sSZR1QV4ienCcZ8lEK_Y4nZYBgAxO3kHrcBXFKGcmwQ,3694
@@ -221,19 +223,19 @@ dtlpy/utilities/reports/report.py,sha256=3nEsNnIWmdPEsd21nN8vMMgaZVcPKn9iawKTTeO
221
223
  dtlpy/utilities/videos/__init__.py,sha256=SV3w51vfPuGBxaMeNemx6qEMHw_C4lLpWNGXMvdsKSY,734
222
224
  dtlpy/utilities/videos/video_player.py,sha256=LCxg0EZ_DeuwcT7U_r7MRC6Q19s0xdFb7x5Gk39PRms,24072
223
225
  dtlpy/utilities/videos/videos.py,sha256=Dj916B4TQRIhI7HZVevl3foFrCsPp0eeWwvGbgX3-_A,21875
224
- dtlpy-1.95.6.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
225
- dtlpy-1.95.6.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
226
- dtlpy-1.95.6.data/scripts/dlp.py,sha256=tEokRaDINISXnq8yNx_CBw1qM5uwjYiZoJOYGqWB3RU,4267
226
+ dtlpy-1.98.8.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
227
+ dtlpy-1.98.8.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
228
+ dtlpy-1.98.8.data/scripts/dlp.py,sha256=tEokRaDINISXnq8yNx_CBw1qM5uwjYiZoJOYGqWB3RU,4267
227
229
  tests/assets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
228
230
  tests/assets/models_flow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
229
231
  tests/assets/models_flow/failedmain.py,sha256=n8F4eu_u7JPrJ1zedbJPvv9e3lHb3ihoErqrBIcseEc,1847
230
- tests/assets/models_flow/main.py,sha256=87O3-JaWcC6m_kA39sqPhX70_VCBzzbLWmX2YQFilJw,1873
232
+ tests/assets/models_flow/main.py,sha256=xotAjdHpFnIic3Wb-4f7GSg2igtuXZjvRPiYdCTawhA,2064
231
233
  tests/assets/models_flow/main_model.py,sha256=Hl_tv7Q6KaRL3yLkpUoLMRqu5-ab1QsUYPL6RPEoamw,2042
232
234
  tests/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
233
235
  tests/features/environment.py,sha256=V23cUx_p4VpNk9kc2I0BDZJHO_xcJBFJq8m3JlYCooc,16736
234
- dtlpy-1.95.6.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
235
- dtlpy-1.95.6.dist-info/METADATA,sha256=gwEWuQCr9AOla7PZkit6MzkeFa6pW626gXqni3GPKVM,3002
236
- dtlpy-1.95.6.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
237
- dtlpy-1.95.6.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
238
- dtlpy-1.95.6.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
239
- dtlpy-1.95.6.dist-info/RECORD,,
236
+ dtlpy-1.98.8.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
237
+ dtlpy-1.98.8.dist-info/METADATA,sha256=21j22oI6cRAaGPEiDzFoctdPwBRHrm3wGwQlrVCiB90,3002
238
+ dtlpy-1.98.8.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
239
+ dtlpy-1.98.8.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
240
+ dtlpy-1.98.8.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
241
+ dtlpy-1.98.8.dist-info/RECORD,,
@@ -49,3 +49,10 @@ class ModelAdapter(dl.BaseModelAdapter):
49
49
 
50
50
  def convert_from_dtlpy(self, data_path, **kwargs):
51
51
  logger.info("convert_from_dtlpy")
52
+
53
+ def embed(self, batch, **kwargs):
54
+ logger.info("embed model")
55
+ embeddings = []
56
+ for item in batch:
57
+ embeddings.append([1,2,3,4])
58
+ return embeddings
File without changes
File without changes
File without changes
File without changes