dtlpy 1.90.39__py3-none-any.whl → 1.91.37__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__version__.py +1 -1
- dtlpy/assets/lock_open.png +0 -0
- dtlpy/entities/analytic.py +118 -98
- dtlpy/entities/annotation.py +22 -31
- dtlpy/entities/annotation_collection.py +19 -21
- dtlpy/entities/app.py +13 -3
- dtlpy/entities/assignment.py +6 -0
- dtlpy/entities/base_entity.py +0 -23
- dtlpy/entities/dataset.py +1 -1
- dtlpy/entities/dpk.py +15 -0
- dtlpy/entities/execution.py +13 -1
- dtlpy/entities/filters.py +85 -6
- dtlpy/entities/integration.py +1 -1
- dtlpy/entities/item.py +26 -0
- dtlpy/entities/node.py +38 -2
- dtlpy/entities/ontology.py +61 -0
- dtlpy/entities/package_function.py +2 -0
- dtlpy/entities/package_module.py +13 -0
- dtlpy/entities/pipeline_execution.py +14 -6
- dtlpy/entities/prompt_item.py +10 -0
- dtlpy/entities/recipe.py +37 -0
- dtlpy/entities/service.py +31 -2
- dtlpy/ml/base_model_adapter.py +92 -2
- dtlpy/repositories/apps.py +12 -12
- dtlpy/repositories/assignments.py +1 -1
- dtlpy/repositories/datasets.py +1 -1
- dtlpy/repositories/dpks.py +29 -0
- dtlpy/repositories/executions.py +27 -30
- dtlpy/repositories/features.py +4 -1
- dtlpy/repositories/packages.py +6 -3
- dtlpy/repositories/pipeline_executions.py +5 -5
- dtlpy/repositories/services.py +28 -7
- dtlpy/repositories/tasks.py +8 -2
- dtlpy/repositories/uploader.py +2 -2
- dtlpy/services/api_client.py +15 -9
- {dtlpy-1.90.39.dist-info → dtlpy-1.91.37.dist-info}/METADATA +2 -2
- {dtlpy-1.90.39.dist-info → dtlpy-1.91.37.dist-info}/RECORD +45 -45
- tests/features/environment.py +38 -1
- {dtlpy-1.90.39.data → dtlpy-1.91.37.data}/scripts/dlp +0 -0
- {dtlpy-1.90.39.data → dtlpy-1.91.37.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.90.39.data → dtlpy-1.91.37.data}/scripts/dlp.py +0 -0
- {dtlpy-1.90.39.dist-info → dtlpy-1.91.37.dist-info}/LICENSE +0 -0
- {dtlpy-1.90.39.dist-info → dtlpy-1.91.37.dist-info}/WHEEL +0 -0
- {dtlpy-1.90.39.dist-info → dtlpy-1.91.37.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.90.39.dist-info → dtlpy-1.91.37.dist-info}/top_level.txt +0 -0
dtlpy/entities/dpk.py
CHANGED
```diff
@@ -126,6 +126,7 @@ class DpkComputeConfig(entities.DlEntity):
     driver_id: str = entities.DlProperty(location=['driverId'], _type=str)
     versions: dict = entities.DlProperty(location=['versions'], _type=dict)
     name: str = entities.DlProperty(location=['name'], _type=str)
+    integrations: List[dict] = entities.DlProperty(location=['integrations'], _type=list)
 
     def to_json(self) -> dict:
         return self._dict.copy()
@@ -226,6 +227,7 @@ class Components(entities.DlEntity):
 class Dpk(entities.DlEntity):
     # name change
     id: str = entities.DlProperty(location=['id'], _type=str)
+    base_id: str = entities.DlProperty(location=['baseId'], _type=str)
     name: str = entities.DlProperty(location=['name'], _type=str)
     version: str = entities.DlProperty(location=['version'], _type=str)
     attributes: list = entities.DlProperty(location=['attributes'], _type=dict)
@@ -388,6 +390,19 @@
         self._revisions = self._get_revision_pages()
         return self._revisions
 
+    def get_revisions(self, version: str):
+        """
+        Get the dpk with the specified version.
+
+        :param str version: the version of the dpk to get.
+        :return: Dpk
+
+        ** Example **
+        ..code-block:: python
+            dpk = dpk.get_revisions(version='1.0.0')
+        """
+        return self.dpks.get_revisions(dpk_id=self.base_id, version=version)
+
     @staticmethod
     def _protected_from_json(_json, client_api, project, is_fetched=True):
         """
```
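The new `base_id` property and the `get_revisions` helper let a `Dpk` entity resolve a specific published revision of itself. A minimal usage sketch, assuming an authenticated `dtlpy` client and that the project exposes a `dpks` repository (the project and DPK names are placeholders):

```python
import dtlpy as dl

project = dl.projects.get(project_name='my-project')  # placeholder name
dpk = project.dpks.get(dpk_name='my-dpk')             # placeholder name

# fetch an older published revision via the new base_id-backed helper
older = dpk.get_revisions(version='1.0.0')
print(older.id, older.version)
```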
dtlpy/entities/execution.py
CHANGED
```diff
@@ -13,8 +13,12 @@ logger = logging.getLogger(name='dtlpy')
 class ExecutionStatus(str, Enum):
     SUCCESS = "success"
     FAILED = "failed"
-    IN_PROGRESS = "
+    IN_PROGRESS = "in-progress"
     CREATED = "created"
+    TERMINATED = 'terminated',
+    ABORTED = 'aborted'
+    CANCELED = 'canceled'
+    SYSTEM_FAILURE = 'system-failure'
 
 
 @attr.s
@@ -377,3 +381,11 @@ class Execution(entities.BaseEntity):
         :return: Service execution object
         """
         return self.executions.wait(execution_id=self.id)
+
+    def in_progress(self):
+        return self.latest_status['status'] not in [ExecutionStatus.FAILED,
+                                                    ExecutionStatus.SUCCESS,
+                                                    ExecutionStatus.TERMINATED,
+                                                    ExecutionStatus.ABORTED,
+                                                    ExecutionStatus.CANCELED,
+                                                    ExecutionStatus.SYSTEM_FAILURE]
```
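With the extra terminal statuses, checking whether an execution is still running is no longer a single comparison; the new `in_progress()` helper folds all terminal states into one test. A minimal polling sketch, assuming an existing `service` entity (the function name is a placeholder):

```python
import time
import dtlpy as dl

execution = service.execute(function_name='run')  # placeholder function name
while execution.in_progress():                    # False once the status is terminal
    time.sleep(5)
    execution = dl.executions.get(execution_id=execution.id)
print(execution.latest_status['status'])
```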
dtlpy/entities/filters.py
CHANGED
```diff
@@ -2,6 +2,7 @@ import urllib.parse
 import logging
 import json
 import os
+import io
 from enum import Enum
 
 from .. import exceptions, entities
@@ -139,7 +140,7 @@ class Filters:
 
     @property
     def resource(self):
-        return self._resource
+        return f'{self._resource.value}' if isinstance(self._resource, FiltersResource) else f'{self._resource}'
 
     @resource.setter
     def resource(self, resource):
@@ -474,7 +475,7 @@
         """
         if value not in [FiltersOrderByDirection.ASCENDING, FiltersOrderByDirection.DESCENDING]:
             raise exceptions.PlatformException(error='400', message='Sort can be by ascending or descending order only')
-        self.sort[field] = value
+        self.sort[field] = value.value if isinstance(value, FiltersOrderByDirection) else value
 
     def platform_url(self, resource) -> str:
         """
@@ -488,14 +489,17 @@
         # add the view option
         _json['view'] = 'icons'
         # convert from enum to string
-        _json["resource"] = '{
+        _json["resource"] = f'{_json["resource"]}'
         # convert the dictionary to a json string
-        _json['dqlFilter'] = json.dumps({'filter': _json.pop('filter')
+        _json['dqlFilter'] = json.dumps({'filter': _json.pop('filter'),
+                                         'join': _json.pop('join'),
+                                         'sort': _json.get('sort')})
         # set the page size as the UI default
         _json['pageSize'] = 100
+        _json['page'] = _json['page']
         # build the url for the dataset data browser
         if isinstance(resource, entities.Dataset):
-            url = resource.platform_url + '
+            url = resource.platform_url + f'?{urllib.parse.urlencode(_json)}'
         else:
             raise NotImplementedError('Not implemented for resource type: {}'.format(type(resource)))
         return url
@@ -511,6 +515,81 @@
         else:
             raise NotImplementedError('Not implemented for resource type: {}'.format(type(resource)))
 
+    def save(self, project: entities.Project, filter_name: str):
+        """
+        Save the current DQL filter to the project
+
+        :param project: dl.Project
+        :param filter_name: the saved filter's name
+        :return: True if success
+        """
+        _json_filter = self.prepare()
+        shebang_dict = {"type": "dql",
+                        "shebang": "dataloop",
+                        "metadata": {
+                            "version": "1.0.0",
+                            "system": {
+                                "mimetype": "dql"
+                            },
+                            "dltype": "filter",
+                            "filterFieldsState": [],
+                            "resource": "items",
+                            "filter": _json_filter.pop('filter'),
+                            "join": _json_filter.pop('join')
+                        }
+                        }
+        b_dataset = project.datasets._get_binaries_dataset()
+        byte_io = io.BytesIO()
+        byte_io.name = filter_name
+        byte_io.write(json.dumps(shebang_dict).encode())
+        byte_io.seek(0)
+        b_dataset.items.upload(local_path=byte_io,
+                               remote_path='/.dataloop/dqlfilters/items',
+                               remote_name=filter_name)
+        return True
+
+    @classmethod
+    def load(cls, project: entities.Project, filter_name: str) -> 'Filters':
+        """
+        Load a saved filter from the project by name
+
+        :param project: dl.Project entity
+        :param filter_name: filter name
+        :return: dl.Filters
+        """
+        b_dataset = project.datasets._get_binaries_dataset()
+        f = entities.Filters(custom_filter={
+            'filter': {'$and': [{'filename': f'/.dataloop/dqlfilters/items/{filter_name}'}]},
+            'page': 0,
+            'pageSize': 1000,
+            'resource': 'items'
+        })
+        pages = b_dataset.items.list(filters=f)
+        if pages.items_count == 0:
+            raise exceptions.NotFound(
+                f'Saved filter not found: {filter_name}. Run `Filters.list()` to list existing filters')
+        with open(pages.items[0].download()) as f:
+            data = json.load(f)
+        custom_filter = data['metadata']['filter']
+        custom_filter['join'] = data['metadata']['join']
+        return cls(custom_filter=custom_filter)
+
+    @staticmethod
+    def list(project: entities.Project) -> list:
+        """
+        List all saved filters for a project
+        :param project: dl.Project entity
+        :return: a list of all the saved filters' names
+        """
+        b_dataset = project.datasets._get_binaries_dataset()
+        f = entities.Filters(use_defaults=False,
+                             field='dir',
+                             values='/.dataloop/dqlfilters/items')
+        pages = b_dataset.items.list(filters=f)
+        all_filter_items = list(pages.all())
+        names = [i.name for i in all_filter_items]
+        return names
+
 
 class SingleFilter:
     def __init__(self, field, values, operator: FiltersOperations = None):
@@ -552,6 +631,6 @@
             _json[self.field] = value
 
         return _json
-
+
     def print(self, indent=2):
         print(json.dumps(self.prepare(), indent=indent))
```
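`save`, `load`, and `list` persist DQL filters as hidden items under `/.dataloop/dqlfilters/items` in the project's binaries dataset, wrapped in a dataloop shebang json. A minimal round-trip sketch, assuming an authenticated client (the project and filter names are placeholders):

```python
import dtlpy as dl

project = dl.projects.get(project_name='my-project')  # placeholder name

filters = dl.Filters()
filters.add(field='dir', values='/incoming')          # placeholder filter
filters.save(project=project, filter_name='incoming-items')

print(dl.Filters.list(project=project))               # ['incoming-items', ...]
loaded = dl.Filters.load(project=project, filter_name='incoming-items')
```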
dtlpy/entities/integration.py
CHANGED
```diff
@@ -79,7 +79,7 @@ class Integration(entities.BaseEntity):
                    name=_json.get('name', None),
                    creator=_json.get('creator', None),
                    created_at=_json.get('createdAt', None),
-                   update_at=_json.get('
+                   update_at=_json.get('updatedAt', None),
                    type=_json.get('type', None),
                    org=_json.get('org', None),
                    client_api=client_api,
```
dtlpy/entities/item.py
CHANGED
```diff
@@ -661,6 +661,32 @@ class Item(entities.BaseEntity):
         else:
             raise exceptions.PlatformException('400', 'must provide assignment_id or task_id')
 
+    def status(self, assignment_id: str = None, task_id: str = None):
+        """
+        Get item status
+
+        :param str assignment_id: assignment id
+        :param str task_id: task id
+
+        :return: status
+        :rtype: str
+
+        **Example**:
+
+        .. code-block:: python
+
+            status = item.status(task_id='task_id')
+        """
+        if not assignment_id and not task_id:
+            raise exceptions.PlatformException('400', 'must provide assignment_id or task_id')
+        status = None
+        resource_id = assignment_id if assignment_id else task_id
+        for ref in self.metadata.get('system', dict()).get('refs', []):
+            if ref.get('id') == resource_id:
+                status = ref.get('metadata', {}).get('status', None)
+                break
+        return status
+
     def set_description(self, text: str):
         """
         Update Item description
```
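`Item.status` reads the per-task or per-assignment status directly from the item's `metadata.system.refs`, so no extra platform call is needed. A short sketch (the ids are placeholders):

```python
item = dataset.items.get(item_id='my-item-id')  # placeholder id
status = item.status(task_id='my-task-id')      # e.g. 'completed', or None if unset
if status is None:
    print('no status recorded for this task on the item')
```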
dtlpy/entities/node.py
CHANGED
```diff
@@ -120,7 +120,7 @@ class PipelineNodeIO:
         self.description = description
 
         if action is not None:
-            warnings.warn('action param has been deprecated in version 1.
+            warnings.warn('action param has been deprecated in version 1.95', DeprecationWarning)
             if actions is None:
                 actions = []
             actions.append(action)
@@ -128,7 +128,7 @@ class PipelineNodeIO:
 
     @property
     def action(self):
-        warnings.warn('action attribute has been deprecated in version 1.
+        warnings.warn('action attribute has been deprecated in version 1.95', DeprecationWarning)
         return None
 
     @staticmethod
@@ -921,12 +921,20 @@ class DatasetNode(PipelineNode):
                  project_id: str,
                  dataset_id: str,
                  dataset_folder: str = None,
+                 load_existing_data: bool = False,
+                 data_filters: entities.Filters = None,
                  position: tuple = (1, 1)):
         """
         :param str name: node name
         :param str project_id: project id
         :param str dataset_id: dataset id
         :param str dataset_folder: folder in dataset to work in it
+        :param bool load_existing_data: optional - enable to automatically load existing data into the
+                                        pipeline (executions) upon activation, based on the defined dataset,
+                                        folder, and data_filters.
+        :param entities.Filters data_filters: optional - filters entity or a dictionary containing filters parameters.
+                                              Use to filter the data items to be loaded when load_existing_data
+                                              is enabled.
         :param tuple position: tuple of the node place
         """
         inputs = [self._default_io()]
@@ -944,6 +952,8 @@ class DatasetNode(PipelineNode):
                          position=position)
         self.dataset_id = dataset_id
         self.dataset_folder = dataset_folder
+        self.load_existing_data = load_existing_data
+        self.data_filters = data_filters
 
     @property
     def dataset_id(self):
@@ -964,6 +974,32 @@
             dataset_folder = '/' + dataset_folder
         self.metadata['dir'] = dataset_folder
 
+    @property
+    def load_existing_data(self):
+        return self.metadata.get('triggerToPipeline', {}).get('active', False)
+
+    @load_existing_data.setter
+    def load_existing_data(self, load_existing_data: bool):
+        if load_existing_data:
+            self.metadata.setdefault('triggerToPipeline', {})['active'] = True
+        else:
+            self.metadata.pop('triggerToPipeline', None)
+
+    @property
+    def data_filters(self):
+        data_filters = self.metadata.get('triggerToPipeline', {}).get('filter', None)
+        if data_filters:
+            data_filters = entities.Filters(custom_filter=json.loads(data_filters))
+        return data_filters
+
+    @data_filters.setter
+    def data_filters(self, data_filters: entities.Filters):
+        if data_filters is None:
+            filters = None
+        else:
+            filters = json.dumps(data_filters.prepare(query_only=True).get('filter'))
+        self.metadata.setdefault('triggerToPipeline', {})['filter'] = filters
+
     @staticmethod
     def from_json(_json: dict):
         parent = PipelineNode.from_json(_json)
```
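With `load_existing_data` and `data_filters`, a dataset node can back-fill the pipeline with matching items when the pipeline is activated; both settings live under the node's `triggerToPipeline` metadata. A minimal construction sketch (the `project`/`dataset` entities and the mimetype filter are assumptions):

```python
import dtlpy as dl

filters = dl.Filters()
filters.add(field='metadata.system.mimetype', values='image/jpeg')

node = dl.DatasetNode(name='input-data',
                      project_id=project.id,
                      dataset_id=dataset.id,
                      dataset_folder='/incoming',
                      load_existing_data=True,  # sets metadata['triggerToPipeline']['active']
                      data_filters=filters,     # stored as a serialized DQL filter
                      position=(1, 1))
```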
dtlpy/entities/ontology.py
CHANGED
```diff
@@ -741,3 +741,64 @@ class Ontology(entities.BaseEntity):
         """
 
         return self.ontologies.delete_attributes(ontology_id=self.id, keys=keys)
+
+    def copy_from(self, ontology_json: dict):
+        """
+        Import ontology to the platform ('ontology' is taken before 'ontology_json')
+
+        :param dict ontology_json: ontology json
+        :return: Ontology object
+        :rtype: dtlpy.entities.ontology.Ontology
+
+        **Example**:
+
+        .. code-block:: python
+
+            ontology = ontology.import_ontology(ontology_json=ontology_json)
+        """
+        # TODO: Add support for import from ontology entity
+        ontology = self.from_json(_json=ontology_json, client_api=self._client_api, recipe=self.recipe)
+        attributes = ontology.attributes
+
+        # params
+        self.labels = ontology.labels
+        for key, value in ontology.metadata.items():
+            if key != "system":
+                self.metadata[key] = value
+
+        if attributes:
+            # Delete irrelevant attribute keys
+            attribute_keys = [attribute.get("key", None) for attribute in attributes]
+            to_delete_keys = [attribute.get("key", None) for attribute in self.attributes
+                              if attribute.get("key", None) not in attribute_keys]
+            self.delete_attributes(keys=to_delete_keys)
+
+            # Update attributes
+            for attribute in attributes:
+                attribute_range = attribute.get("range", None)
+                if attribute_range is not None:
+                    attribute_range = entities.AttributesRange(
+                        min_range=attribute_range.get("min", None),
+                        max_range=attribute_range.get("max", None),
+                        step=attribute_range.get("step", None)
+                    )
+
+                script_data = attribute.get("scriptData", None)
+                if script_data is None:
+                    raise Exception(f"Attribute '{attribute.get('key')}' scriptData is missing in the ontology json!")
+                self.update_attributes(
+                    title=script_data.get("title", None),
+                    key=attribute.get("key", None),
+                    attribute_type=attribute.get("type", None),
+                    scope=attribute.get("scope", None),
+                    optional=script_data.get("optional", None),
+                    values=attribute.get("values", None),
+                    attribute_range=attribute_range
+                )
+        else:
+            logger.warning("No attributes were found (Make sure that you use the correct attributes mode).")
+
+        # defaults
+        self._instance_map = ontology.instance_map
+        self._color_map = ontology.color_map
+        return self.update(system_metadata=True)
```
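`copy_from` overwrites the current ontology's labels, non-system metadata, and attributes from an exported ontology json, then pushes everything with a single `update(system_metadata=True)`. A minimal sketch, assuming `exported_ontology.json` is a prior export whose attributes all carry `scriptData`:

```python
import json

with open('exported_ontology.json') as f:  # assumed export file
    ontology_json = json.load(f)

ontology = recipe.ontologies.get(ontology_id=recipe.ontology_ids[0])
ontology = ontology.copy_from(ontology_json=ontology_json)
```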
dtlpy/entities/package_function.py
CHANGED
```diff
@@ -97,6 +97,8 @@ class FunctionIO(entities.DlEntity):
     name = entities.DlProperty(location=['name'], _type=str)
     actions = entities.DlProperty(location=['actions'], _type=list)
     description = entities.DlProperty(location=['description'], _type=str)
+    integration = entities.DlProperty(location=['integration'], _type=dict)
+    mandatory = entities.DlProperty(location=['mandatory'], _type=bool)
 
     def __repr__(self):
         # TODO need to move to DlEntity
```
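`FunctionIO` gains two schema hints: an `integration` payload and a `mandatory` flag. A short sketch of setting them (the integration payload shape is an assumption):

```python
import dtlpy as dl

io = dl.FunctionIO(type=dl.PackageInputType.ITEM, name='item')
io.mandatory = True                      # mark the input as required
io.integration = {'name': 'my-secret'}   # placeholder integration payload
```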
dtlpy/entities/package_module.py
CHANGED
```diff
@@ -75,7 +75,20 @@ class PackageModule(entities.DlEntity):
         for cls_name, cls_inst in inspect.getmembers(file_module, predicate=inspect.isclass):
             spec = getattr(cls_inst, '__dtlpy__', None)
             if spec is not None:
+                functions = spec['functions']
+                available_methods = [name for name in ['train', 'predict']
+                                     if 'BaseModelAdapter' not in getattr(cls_inst, name).__qualname__]
+                if "train" not in available_methods:
+                    # remove train_model from functions list if train is not available
+                    functions[:] = [d for d in functions if d.get('name') != "train_model"]
+                if "predict" not in available_methods:
+                    # remove predict_items from functions list if predict is not available
+                    functions[:] = [d for d in functions if d.get('name') != "predict_items"]
+                if "extract_features" not in available_methods:
+                    # remove extract_item_features from functions list if extract_features is not available
+                    functions[:] = [d for d in functions if d.get('name') != "extract_item_features"]
                 spec['entryPoint'] = entry_point
+                spec['functions'] = functions
                 module = cls.from_json(spec)
                 break
         if module is None:
```
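The module builder now trims `train_model` and `predict_items` from the generated function list when the adapter class inherits `train`/`predict` unchanged from `BaseModelAdapter` (note that `available_methods` only ever contains 'train'/'predict' as written, so the `extract_features` branch always removes `extract_item_features`). A sketch of an adapter this logic would trim, assuming the standard `BaseModelAdapter` interface:

```python
import dtlpy as dl

class MyAdapter(dl.BaseModelAdapter):
    # predict is overridden, so 'predict_items' is kept;
    # train is inherited, so 'train_model' is dropped from the module spec
    def load(self, local_path, **kwargs):
        self.model = None  # placeholder weight loading

    def predict(self, batch, **kwargs):
        return [dl.AnnotationCollection() for _ in batch]  # empty predictions
```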
dtlpy/entities/pipeline_execution.py
CHANGED
```diff
@@ -76,6 +76,7 @@ class PipelineExecution(entities.BaseEntity):
 
     # sdk
     _pipeline = attr.ib(repr=False)
+    _project = attr.ib(repr=False)
     _client_api = attr.ib(type=ApiClient, repr=False)
     _repositories = attr.ib(repr=False)
 
@@ -103,7 +104,7 @@ class PipelineExecution(entities.BaseEntity):
         return status, pipeline
 
     @classmethod
-    def from_json(cls, _json, client_api, pipeline, is_fetched=True):
+    def from_json(cls, _json, client_api, pipeline, is_fetched=True) -> 'PipelineExecution':
         """
         Turn platform representation of pipeline_execution into a pipeline_execution entity
 
@@ -112,9 +113,11 @@
         :param dtlpy.entities.pipeline.Pipeline pipeline: Pipeline entity
         :param bool is_fetched: is Entity fetched from Platform
         :return: Pipeline entity
-        :rtype: dtlpy.entities.
+        :rtype: dtlpy.entities.PipelineExecution
         """
+        project = None
         if pipeline is not None:
+            project = pipeline._project
             if pipeline.id != _json.get('pipelineId', None):
                 logger.warning('Pipeline has been fetched from a project that is not belong to it')
                 pipeline = None
@@ -132,6 +135,7 @@
             nodes=nodes,
             executions=_json.get('executions', dict()),
             pipeline=pipeline,
+            project=project,
             client_api=client_api,
         )
 
@@ -183,7 +187,10 @@
 
     @property
     def project(self):
-
+        if self._project is None:
+            self._project = self.pipeline.project
+        assert isinstance(self._pipeline.project, entities.Project)
+        return self._pipeline.project
 
     ################
     # repositories #
@@ -195,9 +202,10 @@
 
         r = reps(
             projects=repositories.Projects(client_api=self._client_api),
-            pipelines=repositories.Pipelines(client_api=self._client_api, project=self.
-            pipeline_executions=repositories.PipelineExecutions(client_api=self._client_api,
-
+            pipelines=repositories.Pipelines(client_api=self._client_api, project=self._project),
+            pipeline_executions=repositories.PipelineExecutions(client_api=self._client_api,
+                                                                project=self._project,
+                                                                pipeline=self._pipeline)
         )
         return r
 
```
dtlpy/entities/prompt_item.py
CHANGED
```diff
@@ -78,6 +78,16 @@ class PromptItem:
             prompts_json["prompts"][prompt_key] = prompt_values
         return prompts_json
 
+    @classmethod
+    def from_json(cls, _json):
+        inst = cls(name='dummy')
+        for prompt_key, prompt_values in _json["prompts"].items():
+            prompt = Prompt(key=prompt_key)
+            for val in prompt_values:
+                prompt.add(mimetype=val['mimetype'], value=val['value'])
+            inst.prompts.append(prompt)
+        return inst
+
     def to_bytes_io(self):
         byte_io = io.BytesIO()
         byte_io.name = self.name
```
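`PromptItem.from_json` rebuilds the prompt structure from the serialized `{'prompts': {...}}` layout; note it constructs the instance with a dummy name, so the caller restores the real one. A round-trip sketch, assuming the serializer whose tail is shown above is the matching `to_json`:

```python
from dtlpy.entities.prompt_item import Prompt, PromptItem

prompt_item = PromptItem(name='my-prompt')  # placeholder name
prompt = Prompt(key='1')
prompt.add(mimetype='application/text', value='What is in this image?')
prompt_item.prompts.append(prompt)

rebuilt = PromptItem.from_json(prompt_item.to_json())  # name comes back as 'dummy'
```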
dtlpy/entities/recipe.py
CHANGED
```diff
@@ -262,3 +262,40 @@ class Recipe(entities.BaseEntity):
                                                            'datasetId': dataset.id,
                                                            'name': instruction_item.name}
         self.update(True)
+
+    def upload_annotations_verification_file(self, local_path: str, overwrite: bool = False) -> entities.Item:
+        """
+        Add Annotations Verification js file to the recipe.
+
+        :param str local_path: file path of the annotations verification js file.
+        :param bool overwrite: overwrite exiting file if the local and the remote names are matching
+        :return: annotations verification js item.
+        """
+
+        validation_file_metadata = self.metadata.get("system", dict()).get("validationFile", None)
+        if validation_file_metadata is None:
+            validation_file_metadata = dict()
+
+        remote_name = validation_file_metadata.get("name", None)
+        local_name = os.path.basename(local_path)
+        binaries_dataset = self._project.datasets._get_binaries_dataset()
+        remote_path = f"/.dataloop/recipes/{self.id}/verification/"
+
+        if remote_name is None or overwrite or remote_name != local_name:
+            validation_item = binaries_dataset.items.upload(
+                local_path=local_path,
+                remote_path=remote_path,
+                remote_name=local_name,
+                overwrite=True
+            )
+            self.metadata["system"]["validationFile"] = {
+                "itemId": validation_item.id,
+                "datasetId": binaries_dataset.id,
+                "name": local_name
+            }
+            self.update(system_metadata=True)
+        else:
+            logger.debug(f"Existing Annotations Validation Script was found.")
+            validation_item_id = self.metadata["system"]["validationFile"]["itemId"]
+            validation_item = binaries_dataset.items.get(item_id=validation_item_id)
+        return validation_item
```
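The verification script lands in the project's hidden binaries dataset under `/.dataloop/recipes/<recipe-id>/verification/` and is referenced from the recipe's system metadata; a matching remote name short-circuits to the cached item unless `overwrite` is set. A short sketch (the script path is a placeholder):

```python
recipe = dataset.recipes.list()[0]
item = recipe.upload_annotations_verification_file(local_path='/path/to/verify.js',
                                                   overwrite=True)
print(item.id)  # the uploaded (or previously cached) script item
```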
dtlpy/entities/service.py
CHANGED
```diff
@@ -1,3 +1,4 @@
+import warnings
 from collections import namedtuple
 from enum import Enum
 import traceback
@@ -219,6 +220,7 @@ class Service(entities.BaseEntity):
     on_reset = attr.ib(type=OnResetAction)
     _type = attr.ib(type=ServiceType)
     project_id = attr.ib()
+    org_id = attr.ib()
     is_global = attr.ib()
     max_attempts = attr.ib()
     mode = attr.ib(repr=False)
@@ -235,6 +237,8 @@ class Service(entities.BaseEntity):
     _project = attr.ib(default=None, repr=False)
     _repositories = attr.ib(repr=False)
     updated_by = attr.ib(default=None)
+    app = attr.ib(default=None)
+    integrations = attr.ib(default=None)
 
     @property
    def createdAt(self):
@@ -334,6 +338,9 @@
             updated_by=_json.get('updatedBy', None),
             config=_json.get('config', None),
             settings=_json.get('settings', None),
+            app=_json.get('app', None),
+            integrations=_json.get('integrations', None),
+            org_id=_json.get('orgId', None)
         )
         inst.is_fetched = is_fetched
         return inst
@@ -368,7 +375,19 @@
                                          log_error=False)
             assert isinstance(self._package, entities.Package)
         except:
-
+            dpk_id = None
+            dpk_version = None
+            if self.app and isinstance(self.app, dict):
+                dpk_id = self.app.get('dpkId', None)
+                dpk_version = self.app.get('dpkVersion', None)
+            if dpk_id is None:
+                self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get(
+                    dpk_id=self.package_id)
+            else:
+                self._package = repositories.Dpks(client_api=self._client_api, project=self.project).get_revisions(
+                    dpk_id=dpk_id,
+                    version=dpk_version)
+
             assert isinstance(self._package, entities.Dpk)
         return self._package
 
@@ -462,11 +481,15 @@
                 attr.fields(Service).archive,
                 attr.fields(Service).updated_by,
                 attr.fields(Service).config,
-                attr.fields(Service).settings
+                attr.fields(Service).settings,
+                attr.fields(Service).app,
+                attr.fields(Service).integrations,
+                attr.fields(Service).org_id
             )
         )
 
         _json['projectId'] = self.project_id
+        _json['orgId'] = self.org_id
         _json['packageId'] = self.package_id
         _json['initParams'] = self.init_input
         _json['moduleName'] = self.module_name
@@ -518,6 +541,12 @@
         if self.settings is not None:
             _json['settings'] = self.settings
 
+        if self.app is not None:
+            _json['app'] = self.app
+
+        if self.integrations is not None:
+            _json['integrations'] = self.integrations
+
         return _json
 
     def update(self, force=False):
```
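The net effect of the service changes: app-installed services resolve their package through the DPK revision recorded in `service.app`, and `org_id`/`app`/`integrations` now round-trip through `to_json`. A short inspection sketch, assuming an existing service (the name is a placeholder):

```python
import dtlpy as dl

service = dl.services.get(service_name='my-service')  # placeholder name
print(service.org_id)      # populated from 'orgId' in the platform json
print(service.app)         # e.g. {'dpkId': ..., 'dpkVersion': ...} for app services
_json = service.to_json()  # now includes orgId / app / integrations when set
package = service.package  # falls back to a Dpk revision for app services
```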
|