dtlpy 1.96.8__py3-none-any.whl → 1.99.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. dtlpy/__init__.py +3 -2
  2. dtlpy/__version__.py +1 -1
  3. dtlpy/entities/__init__.py +2 -1
  4. dtlpy/entities/annotation.py +3 -0
  5. dtlpy/entities/annotation_definitions/__init__.py +1 -0
  6. dtlpy/entities/annotation_definitions/gis.py +69 -0
  7. dtlpy/entities/app.py +1 -1
  8. dtlpy/entities/dpk.py +1 -0
  9. dtlpy/entities/filters.py +1 -1
  10. dtlpy/entities/gis_item.py +108 -0
  11. dtlpy/entities/model.py +23 -3
  12. dtlpy/entities/pipeline.py +22 -11
  13. dtlpy/entities/prompt_item.py +13 -21
  14. dtlpy/ml/base_model_adapter.py +1 -1
  15. dtlpy/new_instance.py +2 -2
  16. dtlpy/repositories/annotations.py +1 -1
  17. dtlpy/repositories/downloader.py +159 -118
  18. dtlpy/repositories/items.py +5 -0
  19. dtlpy/repositories/messages.py +2 -2
  20. dtlpy/repositories/models.py +61 -2
  21. dtlpy/repositories/pipeline_executions.py +24 -10
  22. dtlpy/repositories/services.py +2 -0
  23. dtlpy/repositories/upload_element.py +11 -0
  24. dtlpy/repositories/uploader.py +26 -16
  25. dtlpy/services/api_client.py +10 -11
  26. dtlpy/services/async_utils.py +25 -6
  27. {dtlpy-1.96.8.dist-info → dtlpy-1.99.11.dist-info}/METADATA +1 -1
  28. {dtlpy-1.96.8.dist-info → dtlpy-1.99.11.dist-info}/RECORD +36 -34
  29. tests/assets/models_flow/main.py +7 -0
  30. {dtlpy-1.96.8.data → dtlpy-1.99.11.data}/scripts/dlp +0 -0
  31. {dtlpy-1.96.8.data → dtlpy-1.99.11.data}/scripts/dlp.bat +0 -0
  32. {dtlpy-1.96.8.data → dtlpy-1.99.11.data}/scripts/dlp.py +0 -0
  33. {dtlpy-1.96.8.dist-info → dtlpy-1.99.11.dist-info}/LICENSE +0 -0
  34. {dtlpy-1.96.8.dist-info → dtlpy-1.99.11.dist-info}/WHEEL +0 -0
  35. {dtlpy-1.96.8.dist-info → dtlpy-1.99.11.dist-info}/entry_points.txt +0 -0
  36. {dtlpy-1.96.8.dist-info → dtlpy-1.99.11.dist-info}/top_level.txt +0 -0
dtlpy/__init__.py CHANGED
@@ -68,7 +68,7 @@ from .entities import (
      Ontology, Label, Task, TaskPriority, ConsensusTaskType, Assignment, Service, Package, Codebase, Model,
      PackageModule, PackageFunction,
      # annotations
-     Box, Cube, Cube3d, Point, Note, Message, Segmentation, Ellipse, Classification, Subtitle, Polyline, Pose,
+     Box, Cube, Cube3d, Point, Note, Message, Segmentation, Ellipse, Classification, Subtitle, Polyline, Pose, Gis, GisType,
      Description,
      Polygon, Text, FreeText, RefImage,
      # filters
@@ -93,7 +93,7 @@ from .entities import (
      Webhook, HttpMethod,
      ViewAnnotationOptions, AnnotationStatus, AnnotationType,
      ItemStatus, ExecutionStatus, ExportMetadata,
-     PromptItem, Prompt, PromptType,
+     PromptItem, Prompt, PromptType, ItemGis, Layer,
      ItemLink, UrlLink, LinkTypeEnum,
      Modality, ModalityTypeEnum, ModalityRefTypeEnum,
      Workload, WorkloadUnit, ItemAction,
@@ -239,6 +239,7 @@ def checkout_state():


  def use_attributes_2(state: bool = True):
+     warnings.warn("Function 'use_attributes_2()' is deprecated as of version 1.99.11 and has been non-functional since version 1.90.39. To work with attributes 2.0, simply use 'update_attributes()'.", DeprecationWarning)
      client_api.attributes_mode.use_attributes_2 = state


dtlpy/__version__.py CHANGED
@@ -1 +1 @@
- version = '1.96.8'
+ version = '1.99.11'
dtlpy/entities/__init__.py CHANGED
@@ -34,7 +34,7 @@ from .recipe import Recipe
  from .ontology import Ontology, AttributesTypes, AttributesRange
  from .annotation_definitions import Box, Cube, Cube3d, Point, Segmentation, Polygon, Ellipse, Classification, \
      Subtitle, Text, FreeText, RefImage, \
-     Polyline, Comparison, UndefinedAnnotationType, Note, Message, Description, Pose
+     Polyline, Comparison, UndefinedAnnotationType, Note, Message, Description, Pose, Gis, GisType
  from .label import Label
  from .codebase import Codebase, PackageCodebaseType, ItemCodebase, GitCodebase, FilesystemCodebase, LocalCodebase
  from .package import Package, RequirementOperator, PackageRequirement
@@ -78,3 +78,4 @@ from .prompt_item import Prompt, PromptItem, PromptType
  from .compute import ClusterProvider, ComputeType, ComputeStatus, Toleration, DeploymentResource, DeploymentResources, \
      NodePool, AuthenticationIntegration, Authentication, ComputeCluster, ComputeContext, Compute, KubernetesCompute, \
      ServiceDriver
+ from .gis_item import ItemGis, Layer
dtlpy/entities/annotation.py CHANGED
@@ -43,6 +43,7 @@ class AnnotationType(str, Enum):
      SEGMENTATION = "binary"
      SUBTITLE = "subtitle"
      TEXT = "text_mark"
+     GIS = "gis"


  class ViewAnnotationOptions(str, Enum):
@@ -1796,6 +1797,8 @@ class FrameAnnotation(entities.BaseEntity):
              annotation = entities.Note.from_json(_json)
          elif _json['type'] == 'pose':
              annotation = entities.Pose.from_json(_json)
+         elif _json['type'] == 'gis':
+             annotation = entities.Gis.from_json(_json)
          else:
              annotation = entities.UndefinedAnnotationType.from_json(_json)
          return annotation
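With "gis" registered as an AnnotationType (and, further down, added to the default annotation-type filter values in filters.py), GIS annotations can be queried like any other type. A minimal sketch, assuming an existing dataset; the dataset id is a placeholder:

    import dtlpy as dl

    dataset = dl.datasets.get(dataset_id='my-dataset-id')  # hypothetical dataset id
    # filter annotations by the new GIS type
    filters = dl.Filters(resource=dl.FiltersResource.ANNOTATION)
    filters.add(field='type', values=dl.AnnotationType.GIS)
    pages = dataset.annotations.list(filters=filters)
    print('GIS annotations found:', pages.items_count)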
dtlpy/entities/annotation_definitions/__init__.py CHANGED
@@ -17,3 +17,4 @@ from .pose import Pose
  from .text import Text
  from .free_text import FreeText
  from .ref_image import RefImage
+ from .gis import Gis, GisType
dtlpy/entities/annotation_definitions/gis.py ADDED
@@ -0,0 +1,69 @@
+ from . import BaseAnnotationDefinition
+
+
+ class GisType:
+     """
+     State enum
+     """
+     BOX = 'box'
+     POLYGON = 'polygon'
+     POLYLINE = 'polyline'
+     POINT = 'point'
+
+
+ class Gis(BaseAnnotationDefinition):
+     """
+     Box annotation object
+     Can create a box using 2 point using: "top", "left", "bottom", "right" (to form a box [(left, top), (right, bottom)])
+     For rotated box add the "angel"
+     """
+     type = "gis"
+
+     def __init__(self,
+                  annotation_type: GisType,
+                  geo,
+                  label=None,
+                  attributes=None,
+                  description=None,
+                  ):
+         """
+         Can create gis annotation using points:
+
+         :param geo: list of points
+         :param label: annotation label
+         :param attributes: a list of attributes for the annotation
+         :param description:
+
+         :return:
+         """
+         super().__init__(description=description, attributes=attributes)
+
+         if geo is None:
+             raise ValueError('geo must be provided')
+         if annotation_type is None:
+             raise ValueError('annotation_type must be provided')
+         self.label = label
+         self.annotation = None
+         self.geo = geo
+         self.annotation_type = annotation_type
+
+     def to_coordinates(self, color):
+         return {
+             "geo_type": self.annotation_type,
+             "wgs84_geo_coordinates": self.geo
+         }
+
+     @classmethod
+     def from_json(cls, _json):
+         json_coordinates = _json.get("coordinates", {}) if "coordinates" in _json else _json.get("data", {})
+         coordinates = json_coordinates.get("wgs84_geo_coordinates", None)
+         annotations_type = json_coordinates.get("geo_type", None)
+         if coordinates is None:
+             raise ValueError('can not find "coordinates" or "data" in annotation. id: {}'.format(_json["id"]))
+
+         return cls(
+             annotation_type=annotations_type,
+             geo=coordinates,
+             label=_json["label"],
+             attributes=_json.get("attributes", None)
+         )
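The new Gis definition is exported at the package level (dl.Gis, dl.GisType) and can go through the usual annotation builder flow. A minimal sketch, assuming an existing GIS item; the item id, label and WGS84 coordinates are placeholders, and the exact nesting expected in geo is an assumption:

    import dtlpy as dl

    item = dl.items.get(item_id='my-gis-item-id')  # hypothetical item id
    builder = item.annotations.builder()
    # GisType lists the supported geometries: BOX, POLYGON, POLYLINE, POINT
    builder.add(annotation_definition=dl.Gis(annotation_type=dl.GisType.POLYGON,
                                             geo=[[[34.78, 32.07], [34.79, 32.07], [34.79, 32.08], [34.78, 32.07]]],
                                             label='building'))
    item.annotations.upload(builder)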
dtlpy/entities/app.py CHANGED
@@ -199,7 +199,7 @@ class App(entities.BaseEntity):
              name=_json.get('name', None),
              url=_json.get('url', None),
              created_at=_json.get('createdAt', None),
-             updated_at=_json.get('updateAt', None),
+             updated_at=_json.get('updatedAt', None),
              creator=_json.get('creator', None),
              project_id=_json.get('projectId', None),
              org_id=_json.get('orgId', None),
dtlpy/entities/dpk.py CHANGED
@@ -59,6 +59,7 @@ class Toolbar(entities.DlEntity):

  class Panel(entities.DlEntity):
      name = entities.DlProperty(location=['name'], _type=str)
+     path = entities.DlProperty(location=['path'], _type=str, default=None)
      min_role = entities.DlProperty(location=['minRole'], _type=list)
      supported_slots = entities.DlProperty(location=['supportedSlots'], _type=list)

dtlpy/entities/filters.py CHANGED
@@ -330,7 +330,7 @@ class Filters:
          self._unique_fields = ['type']
          self.add(field='type',
                   values=['box', 'class', 'comparison', 'ellipse', 'point', 'segment', 'polyline', 'binary',
-                          'subtitle', 'cube', 'cube_3d', 'pose', 'text_mark', 'text', 'ref_image'],
+                          'subtitle', 'cube', 'cube_3d', 'pose', 'text_mark', 'text', 'ref_image', 'gis'],
                   operator=FiltersOperations.IN,
                   method=FiltersMethod.AND)

dtlpy/entities/gis_item.py ADDED
@@ -0,0 +1,108 @@
+ import json
+ from typing import List
+ import logging
+ import os
+
+ logger = logging.getLogger(name='dtlpy')
+
+
+ class Layer:
+     def __init__(self, name, layer_type, url):
+         self.name = name
+         self.type = layer_type
+         self.url = url
+
+
+ class ItemGis:
+     def __init__(self,
+                  name: str,
+                  data: dict = None,
+                  layer: Layer = None,
+                  optional_layers: List[Layer] = None,
+                  zoom: int = None,
+                  min_zoom: int = None,
+                  max_zoom: int = None,
+                  epsg: str = None,
+                  bounds: list = None,
+                  aoi: list = None):
+         self.name = name
+
+         self.layer = layer or Layer(name=data.get('name', None), layer_type=data.get('type', None),
+                                     url=data.get('url', None))
+         if self.layer is None:
+             raise ValueError('layer is required')
+         elif self.layer is not None and isinstance(self.layer, dict):
+             self.layer = Layer(name=self.layer.get('name', None), layer_type=self.layer.get('type', None), url=self.layer.get('url', None))
+
+
+         self.optional_layers = optional_layers or [
+             Layer(name=layer.get('name', None), layer_type=layer.get('type', None), url=layer.get('url', None)) for
+             layer in data.get('optionalLayers', [])]
+
+         if self.optional_layers is not None and isinstance(optional_layers, list):
+             new_optional_layers = []
+             for op_layer in self.optional_layers:
+                 if isinstance(op_layer, dict):
+                     new_optional_layers.append(Layer(name=op_layer.get('name', None), layer_type=op_layer.get('type', None), url=op_layer.get('url', None)))
+                 else:
+                     new_optional_layers.append(op_layer)
+             self.optional_layers = new_optional_layers
+
+         self.epsg = epsg or data.get('epsg', None)
+         if self.epsg is None:
+             raise ValueError('epsg is required')
+
+         self.zoom = zoom or data.get('zoom', None)
+         self.min_zoom = min_zoom or data.get('minZoom', None)
+         self.max_zoom = max_zoom or data.get('maxZoom', None)
+         self.bounds = bounds or data.get('bounds', None)
+         self.aoi = aoi or data.get('aoi', None)
+
+     def to_json(self):
+         _json = {
+             "type": "gis",
+             "shebang": "dataloop",
+             "metadata": {
+                 "dltype": "gis"
+             },
+             'layer': {
+                 'name': self.layer.name,
+                 'type': self.layer.type,
+                 'url': self.layer.url
+             },
+             "epsg": self.epsg
+         }
+         if self.optional_layers is not None:
+             _json['optionalLayers'] = [
+                 {
+                     'name': layer.name,
+                     'type': layer.type,
+                     'url': layer.url
+                 } for layer in self.optional_layers
+             ]
+         if self.zoom is not None:
+             _json['zoom'] = self.zoom
+         if self.min_zoom is not None:
+             _json['minZoom'] = self.min_zoom
+         if self.max_zoom is not None:
+             _json['maxZoom'] = self.max_zoom
+         if self.bounds is not None:
+             _json['bounds'] = self.bounds
+         if self.aoi is not None:
+             _json['aoi'] = self.aoi
+         return _json
+
+     @classmethod
+     def from_local_file(cls, filepath):
+         """
+         Create a new prompt item from a file
+         :param filepath: path to the file
+         :return: PromptItem object
+         """
+         if os.path.exists(filepath) is False:
+             raise FileNotFoundError(f'File does not exists: {filepath}')
+         if 'json' not in os.path.splitext(filepath)[-1]:
+             raise ValueError(f'Expected path to json item, got {os.path.splitext(filepath)[-1]}')
+         with open(filepath, 'r', encoding='utf-8') as f:
+             data = json.load(f)
+         return cls(name=os.path.basename(filepath), data=data)
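ItemGis is the client-side description of a GIS item: a main tile Layer, optional extra layers, an EPSG code, and optional zoom/bounds/AOI settings, with to_json() producing the "gis"-typed payload and from_local_file() reading one back from a JSON file. A minimal sketch; the layer names, tile URL, EPSG code and zoom values are placeholders, and how the payload is uploaded as an item is not shown in this diff:

    import dtlpy as dl

    # hypothetical payload, using the camelCase keys the constructor reads from `data`
    data = {
        'name': 'orthophoto',
        'type': 'xyz',
        'url': 'https://tiles.example.com/{z}/{x}/{y}.png',
        'epsg': '4326',
        'zoom': 12,
        'minZoom': 3,
        'maxZoom': 18,
        'optionalLayers': [
            {'name': 'labels', 'type': 'xyz', 'url': 'https://tiles.example.com/labels/{z}/{x}/{y}.png'},
        ],
    }
    gis_item = dl.ItemGis(name='site.json', data=data)
    payload = gis_item.to_json()  # "gis"-typed JSON, ready to be written to a .json file and uploaded as an item

    # a local .json file with the same shape as `data` can also be loaded directly:
    # gis_item = dl.ItemGis.from_local_file(filepath='/path/to/site.json')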
dtlpy/entities/model.py CHANGED
@@ -307,7 +307,7 @@ class Model(entities.BaseEntity):
      @_repositories.default
      def set_repositories(self):
          reps = namedtuple('repositories',
-                           field_names=['projects', 'datasets', 'packages', 'models', 'ontologies', 'artifacts',
+                           field_names=['projects', 'datasets', 'models', 'packages', 'ontologies', 'artifacts',
                                         'metrics', 'dpks', 'services'])

          r = reps(projects=repositories.Projects(client_api=self._client_api),
@@ -552,14 +552,34 @@ class Model(entities.BaseEntity):
              filters=filters,
              service_config=service_config)

-     def predict(self, item_ids):
+     def predict(self, item_ids=None, dataset_id=None):
          """
          Run model prediction with items

          :param item_ids: a list of item id to run the prediction.
+         :param dataset_id: dataset id to run the prediction on
          :return:
          """
-         return self.models.predict(model=self, item_ids=item_ids)
+         return self.models.predict(model=self, item_ids=item_ids, dataset_id=dataset_id)
+
+     def embed(self, item_ids):
+         """
+         Run model embed with items
+
+         :param item_ids: a list of item id to run the embed.
+         :return:
+         """
+         return self.models.embed(model=self, item_ids=item_ids)
+
+     def embed_datasets(self, dataset_ids, attach_trigger=False):
+         """
+         Run model embed with datasets
+
+         :param dataset_ids: a list of dataset id to run the embed.
+         :param attach_trigger: bool - True, if you want to activate the trigger
+         :return:
+         """
+         return self.models.embed_datasets(model=self, dataset_ids=dataset_ids, attach_trigger=attach_trigger)

      def deploy(self, service_config=None) -> entities.Service:
          """
dtlpy/entities/pipeline.py CHANGED
@@ -488,26 +488,35 @@ class Pipeline(entities.BaseEntity):
          """
          return self.pipelines.pause(pipeline=self, keep_triggers_active=keep_triggers_active)

-     def execute(self, execution_input=None):
+     def execute(self, execution_input=None, node_id: str = None):
          """
          execute a pipeline and return to execute

          :param execution_input: list of the dl.FunctionIO or dict of pipeline input - example {'item': 'item_id'}
+         :param str node_id: node id to execute
          :return: entities.PipelineExecution object
          """
-         execution = self.pipeline_executions.create(pipeline_id=self.id, execution_input=execution_input)
+         execution = self.pipeline_executions.create(
+             pipeline_id=self.id,
+             execution_input=execution_input,
+             node_id=node_id
+         )
          return execution

-     def execute_batch(self,
-                       filters,
-                       execution_inputs=None,
-                       wait=True):
+     def execute_batch(
+             self,
+             filters,
+             execution_inputs=None,
+             wait=True,
+             node_id: str = None
+     ):
          """
          execute a pipeline and return to execute

          :param execution_inputs: list of the dl.FunctionIO or dict of pipeline input - example {'item': 'item_id'}, that represent the extra inputs of the function
          :param filters: Filters entity for a filtering before execute
          :param bool wait: wait until create task finish
+         :param str node_id: node id to execute
          :return: entities.PipelineExecution object

          **Example**:
@@ -518,10 +527,13 @@ class Pipeline(entities.BaseEntity):
              execution_inputs=dl.FunctionIO(type=dl.PackageInputType.STRING, value='test', name='string'),
              filters=dl.Filters(field='dir', values='/test', context={'datasets': [dataset.id]))
          """
-         command = self.pipeline_executions.create_batch(pipeline_id=self.id,
-                                                         execution_inputs=execution_inputs,
-                                                         filters=filters,
-                                                         wait=wait)
+         command = self.pipeline_executions.create_batch(
+             pipeline_id=self.id,
+             execution_inputs=execution_inputs,
+             filters=filters,
+             wait=wait,
+             node_id=node_id
+         )
          return command

      def reset(self, stop_if_running: bool = False):
@@ -578,4 +590,3 @@ class Pipeline(entities.BaseEntity):
          for variable in self.variables:
              if variable.name in keys:
                  variable.value = kwargs[variable.name]
-
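Both execution entry points now forward an optional node_id, so a run can start from a specific pipeline node instead of the root. A minimal sketch; the pipeline, item, dataset and node ids are placeholders:

    import dtlpy as dl

    pipeline = dl.pipelines.get(pipeline_id='my-pipeline-id')  # hypothetical pipeline id

    # single execution, optionally starting from a specific node
    execution = pipeline.execute(execution_input={'item': 'my-item-id'},
                                 node_id='my-node-id')

    # batch execution over filtered items, with the same optional node_id
    command = pipeline.execute_batch(
        filters=dl.Filters(field='dir', values='/test', context={'datasets': ['my-dataset-id']}),
        node_id='my-node-id')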
dtlpy/entities/prompt_item.py CHANGED
@@ -347,7 +347,6 @@ class PromptItem:
      def add(self,
              message: dict,
              prompt_key: str = None,
-             stream: bool = True,
              model_info: dict = None):
          """
          add a prompt to the prompt item
@@ -356,29 +355,28 @@ class PromptItem:

          :param message:
          :param prompt_key:
-         :param stream:
          :param model_info:
          :return:
          """
-         if prompt_key is None:
-             prompt_key = len(self.prompts) + 1
          role = message.get('role', 'user')
          content = message.get('content', list())

          if self.role_mapping.get(role, 'item') == 'item':
+             if prompt_key is None:
+                 prompt_key = str(len(self.prompts) + 1)
              # for new prompt we need a new key
-             prompt = Prompt(key=str(prompt_key), role=role)
+             prompt = Prompt(key=prompt_key, role=role)
              for element in content:
                  prompt.add_element(value=element.get('value', ''),
                                     mimetype=element.get('mimetype', PromptType.TEXT))

              # create new prompt and add to prompts
              self.prompts.append(prompt)
-             if self._item is not None and stream is True:
+             if self._item is not None:
                  self._item._Item__update_item_binary(_json=self.to_json())
          else:
-             # for response - we need to assign to previous key
-             prompt_key = str(prompt_key - 1)
+             if prompt_key is None:
+                 prompt_key = str(len(self.prompts))
              assistant_message = content[0]
              assistant_mimetype = assistant_message.get('mimetype', PromptType.TEXT)
              uploaded_annotation = None
@@ -421,21 +419,19 @@ class PromptItem:
                  prompt.add_element(mimetype=PromptType.METADATA,
                                     value={"model_info": model_info})

-             if stream:
-                 existing_annotation = entities.Annotation.new(item=self._item,
-                                                               metadata=metadata,
-                                                               annotation_definition=annotation_definition)
-                 uploaded_annotation = existing_annotation.upload()
-                 prompt.add_element(mimetype=PromptType.METADATA,
-                                    value={"id": uploaded_annotation.id})
+             existing_annotation = entities.Annotation.new(item=self._item,
+                                                           metadata=metadata,
+                                                           annotation_definition=annotation_definition)
+             uploaded_annotation = existing_annotation.upload()
+             prompt.add_element(mimetype=PromptType.METADATA,
+                                value={"id": uploaded_annotation.id})
              existing_prompt = prompt
              self.assistant_prompts.append(prompt)

-             # TODO Shadi fix
              existing_prompt_element = [element for element in existing_prompt.elements if
                                         element['mimetype'] != PromptType.METADATA][-1]
              existing_prompt_element['value'] = assistant_message.get('value')
-             if stream is True and uploaded_annotation is None:
+             if uploaded_annotation is None:
                  # Creating annotation with old dict to match platform dict
                  annotation_definition = entities.FreeText(text='')
                  metadata = {'system': {'promptId': prompt_key},
@@ -451,7 +447,3 @@ class PromptItem:
              # update the annotation with the new text
              annotation.annotation_definition.text = existing_prompt_element['value']
              self._item.annotations.update(annotation)
-
-     def update(self):
-         if self._item is not None:
-             self._item._Item__update_item_binary(_json=self.to_json())
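PromptItem.add() no longer takes a stream flag (and the separate update() helper is removed): user prompts are written to the item binary as soon as they are added, and assistant messages are uploaded as annotations in the same call. A minimal sketch of the message dict it expects; the item id, texts and model_info values are placeholders, and PromptItem.from_item() is assumed from the existing API:

    import dtlpy as dl

    item = dl.items.get(item_id='my-prompt-item-id')   # hypothetical prompt item id
    prompt_item = dl.PromptItem.from_item(item)        # assumed constructor for an item-backed PromptItem

    # user message: a new prompt key is created and the item binary is updated immediately
    prompt_item.add(message={'role': 'user',
                             'content': [{'mimetype': dl.PromptType.TEXT,
                                          'value': 'What is in this image?'}]})

    # assistant message: attached to the last prompt key and uploaded as an annotation
    prompt_item.add(message={'role': 'assistant',
                             'content': [{'mimetype': dl.PromptType.TEXT,
                                          'value': 'A red car parked on a street.'}]},
                    model_info={'name': 'my-model', 'confidence': 0.9, 'model_id': 'my-model-id'})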
dtlpy/ml/base_model_adapter.py CHANGED
@@ -271,7 +271,7 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
          :param bool overwrite: overwrite the data path (download again). default is False
          """
          # define paths
-         dataloop_path = os.path.join(os.path.expanduser('~'), '.dataloop')
+         dataloop_path = service_defaults.DATALOOP_PATH
          root_path = self.adapter_defaults.resolve("root_path", root_path)
          data_path = self.adapter_defaults.resolve("data_path", data_path)
          output_path = self.adapter_defaults.resolve("output_path", output_path)
dtlpy/new_instance.py CHANGED
@@ -13,7 +13,7 @@ class Dtlpy:
          Ontology, Label, Task, TaskPriority, ConsensusTaskType, Assignment, Service, Package, Codebase, Model,
          PackageModule, PackageFunction,
          # annotations
-         Box, Cube, Cube3d, Point, Note, Message, Segmentation, Ellipse, Classification, Subtitle, Polyline, Pose,
+         Box, Cube, Cube3d, Point, Note, Message, Segmentation, Ellipse, Classification, Subtitle, Polyline, Pose, Gis, GisType,
          Description,
          Polygon, Text, FreeText, RefImage,
          # filters
@@ -38,7 +38,7 @@ class Dtlpy:
          Webhook, HttpMethod,
          ViewAnnotationOptions, AnnotationStatus, AnnotationType,
          ItemStatus, ExecutionStatus, ExportMetadata,
-         PromptItem, Prompt, PromptType,
+         PromptItem, Prompt, PromptType, ItemGis, Layer,
          ItemLink, UrlLink, LinkTypeEnum,
          Modality, ModalityTypeEnum, ModalityRefTypeEnum,
          Workload, WorkloadUnit, ItemAction,
dtlpy/repositories/annotations.py CHANGED
@@ -312,7 +312,7 @@ class Annotations:
          """
          # get item's annotations
          annotations = self.list()
-         if 'text' in self.item.metadata.get('system').get('mimetype', ''):
+         if 'text' in self.item.metadata.get('system').get('mimetype', '') or 'json' in self.item.metadata.get('system').get('mimetype', ''):
              annotation_format = entities.ViewAnnotationOptions.JSON
          elif 'audio' not in self.item.metadata.get('system').get('mimetype', ''):
              # height/weight