dtlpy 1.106.5__py3-none-any.whl → 1.107.8__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in those registries.
- dtlpy/__version__.py +1 -1
- dtlpy/entities/dataset.py +26 -9
- dtlpy/entities/item.py +6 -2
- dtlpy/entities/task.py +1 -1
- dtlpy/ml/base_model_adapter.py +3 -0
- dtlpy/repositories/datasets.py +18 -7
- dtlpy/repositories/downloader.py +17 -13
- dtlpy/repositories/integrations.py +43 -26
- dtlpy/repositories/items.py +5 -2
- dtlpy/repositories/tasks.py +3 -2
- {dtlpy-1.106.5.dist-info → dtlpy-1.107.8.dist-info}/METADATA +1 -1
- {dtlpy-1.106.5.dist-info → dtlpy-1.107.8.dist-info}/RECORD +19 -19
- {dtlpy-1.106.5.data → dtlpy-1.107.8.data}/scripts/dlp +0 -0
- {dtlpy-1.106.5.data → dtlpy-1.107.8.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.106.5.data → dtlpy-1.107.8.data}/scripts/dlp.py +0 -0
- {dtlpy-1.106.5.dist-info → dtlpy-1.107.8.dist-info}/LICENSE +0 -0
- {dtlpy-1.106.5.dist-info → dtlpy-1.107.8.dist-info}/WHEEL +0 -0
- {dtlpy-1.106.5.dist-info → dtlpy-1.107.8.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.106.5.dist-info → dtlpy-1.107.8.dist-info}/top_level.txt +0 -0
dtlpy/__version__.py
CHANGED
@@ -1 +1 @@
-version = '1.106.5'
+version = '1.107.8'

dtlpy/entities/dataset.py
CHANGED
@@ -625,7 +625,8 @@ class Dataset(entities.BaseEntity):
                  filter_output_annotations=False,
                  alpha=1,
                  export_version=ExportVersion.V1,
-                 dataset_lock=False
+                 dataset_lock=False,
+                 lock_timeout_sec=None
                  ):
         """
         Download dataset by filters.
@@ -640,6 +641,7 @@ class Dataset(entities.BaseEntity):
         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
         :param bool overwrite: optional - default = False
         :param bool dataset_lock: optional - default = False
+        :param int lock_timeout_sec: optional
         :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
         :param bool with_text: optional - add text to annotations, default = False
         :param str remote_path: DEPRECATED and ignored
@@ -663,6 +665,7 @@ class Dataset(entities.BaseEntity):
                              with_text=False,
                              alpha=1,
                              dataset_lock=False
+                             lock_timeout_sec=300
                              )
         """

@@ -681,7 +684,8 @@ class Dataset(entities.BaseEntity):
                                    filter_output_annotations=filter_output_annotations,
                                    alpha=alpha,
                                    export_version=export_version,
-                                   dataset_lock=dataset_lock
+                                   dataset_lock=dataset_lock,
+                                   lock_timeout_sec=lock_timeout_sec
                                    )

     def export(self,
@@ -693,7 +697,8 @@ class Dataset(entities.BaseEntity):
                include_annotations: bool = False,
                export_type: ExportType = ExportType.JSON,
                timeout: int = 0,
-               dataset_lock: bool = False
+               dataset_lock: bool = False,
+               lock_timeout_sec: int = None):
         """
         Export dataset items and annotations.

@@ -708,6 +713,7 @@ class Dataset(entities.BaseEntity):
         :param bool include_feature_vectors: Include item feature vectors in the export
         :param bool include_annotations: Include item annotations in the export
         :param bool dataset_lock: Make dataset readonly during the export
+        :param int lock_timeout_sec: Timeout for locking the dataset during export in seconds
         :param entities.ExportType export_type: Type of export ('json' or 'zip')
         :param int timeout: Maximum time in seconds to wait for the export to complete
         :return: Exported item
@@ -732,7 +738,8 @@ class Dataset(entities.BaseEntity):
                                     include_annotations=include_annotations,
                                     export_type=export_type,
                                     timeout=timeout,
-                                    dataset_lock=dataset_lock)
+                                    dataset_lock=dataset_lock,
+                                    lock_timeout_sec=lock_timeout_sec)

     def upload_annotations(self,
                            local_path,
@@ -967,7 +974,8 @@ class Dataset(entities.BaseEntity):
                 without_relative_path=None,
                 alpha=1,
                 export_version=ExportVersion.V1,
-                dataset_lock=False
+                dataset_lock=False,
+                lock_timeout_sec=None
                 ):
         """
         Download dataset by filters.
@@ -983,6 +991,7 @@ class Dataset(entities.BaseEntity):
         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
         :param bool overwrite: optional - default = False to overwrite the existing files
         :param bool dataset_lock: optional - default = False to make dataset readonly during the download
+        :param int lock_timeout_sec: optional - Set lock timeout for the export
         :param bool to_items_folder: Create 'items' folder and download items to it
         :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
         :param bool with_text: optional - add text to annotations, default = False
@@ -1001,7 +1010,8 @@ class Dataset(entities.BaseEntity):
                              thickness=1,
                              with_text=False,
                              alpha=1,
-                             dataset_lock=False,
+                             dataset_lock=False,
+                             lock_timeout_sec=300
                              )
         """
         return self.items.download(filters=filters,
@@ -1016,7 +1026,9 @@ class Dataset(entities.BaseEntity):
                                    without_relative_path=without_relative_path,
                                    alpha=alpha,
                                    export_version=export_version,
-                                   dataset_lock=dataset_lock
+                                   dataset_lock=dataset_lock,
+                                   lock_timeout_sec=lock_timeout_sec
+                                   )

     def download_folder(
             self,
@@ -1033,7 +1045,8 @@ class Dataset(entities.BaseEntity):
             without_relative_path=None,
             alpha=1,
             export_version=ExportVersion.V1,
-            dataset_lock=False
+            dataset_lock=False,
+            lock_timeout_sec=None
             ):
         """
         Download dataset folder.
@@ -1049,6 +1062,7 @@ class Dataset(entities.BaseEntity):
         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
         :param bool overwrite: optional - default = False to overwrite the existing files
         :param bool dataset_lock: optional - default = False to make the dataset readonly during the download
+        :param bool lock_timeout_sec: optional - Set lock timeout for the export
         :param bool to_items_folder: Create 'items' folder and download items to it
         :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
         :param bool with_text: optional - add text to annotations, default = False
@@ -1070,6 +1084,7 @@ class Dataset(entities.BaseEntity):
                                 alpha=1,
                                 save_locally=True,
                                 dataset_lock=False
+                                lock_timeout_sec=300
                                 )
         """
         filters = self.datasets._bulid_folder_filter(folder_path=folder_path, filters=filters)
@@ -1085,7 +1100,9 @@ class Dataset(entities.BaseEntity):
                                             without_relative_path=without_relative_path,
                                             alpha=alpha,
                                             export_version=export_version,
-                                            dataset_lock=dataset_lock
+                                            dataset_lock=dataset_lock,
+                                            lock_timeout_sec=lock_timeout_sec
+                                            )

     def delete_labels(self, label_names):
         """

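Both `Dataset.download` and `Dataset.export` thread the two new arguments through as shown above. A minimal usage sketch, assuming a logged-in session; the project/dataset names and local path are placeholders, not taken from this diff:

```python
import dtlpy as dl

# Placeholders; assumes dl.login()/dl.setenv() has already been handled.
project = dl.projects.get(project_name='<project-name>')
dataset = project.datasets.get(dataset_name='<dataset-name>')

# Keep the dataset read-only while downloading, releasing the lock after
# at most 300 seconds.
dataset.download(local_path='/tmp/my_dataset',   # illustrative path
                 dataset_lock=True,
                 lock_timeout_sec=300)

# The same pair of arguments is accepted by export():
dataset.export(include_annotations=True,
               export_type=dl.ExportType.JSON,
               dataset_lock=True,
               lock_timeout_sec=300)
```
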
dtlpy/entities/item.py
CHANGED
@@ -454,7 +454,8 @@ class Item(entities.BaseEntity):
                  annotation_filters=None,
                  alpha=1,
                  export_version=ExportVersion.V1,
-                 dataset_lock=False
+                 dataset_lock=False,
+                 lock_timeout_sec=None
                  ):
         """
         Download dataset by filters.
@@ -469,6 +470,7 @@ class Item(entities.BaseEntity):
         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
         :param bool overwrite: optional - default = False
         :param bool dataset_lock: optional - default = False
+        :param int lock_timeout_sec: optional
         :param bool to_items_folder: Create 'items' folder and download items to it
         :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
         :param bool with_text: optional - add text to annotations, default = False
@@ -489,6 +491,7 @@ class Item(entities.BaseEntity):
                           alpha=1,
                           save_locally=True,
                           dataset_lock=False
+                          lock_timeout_sec=300
                           )
         """
         # if dir - concatenate local path and item name
@@ -523,7 +526,8 @@ class Item(entities.BaseEntity):
                                       with_text=with_text,
                                       export_version=export_version,
                                       filters=filters,
-                                      dataset_lock=dataset_lock
+                                      dataset_lock=dataset_lock,
+                                      lock_timeout_sec=lock_timeout_sec)

     def delete(self):
         """

dtlpy/entities/task.py
CHANGED
@@ -329,7 +329,7 @@ class Task:
         """
         Update an Annotation Task

-        :param bool system_metadata:
+        :param bool system_metadata: DEPRECATED
         """
         return self.tasks.update(task=self, system_metadata=system_metadata)

dtlpy/ml/base_model_adapter.py
CHANGED
@@ -340,6 +340,9 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
                                                annotation_options=annotation_options,
                                                annotation_filters=annotation_filters
                                                )
+            if isinstance(ret_list, list) and len(ret_list) == 0:
+                raise ValueError(f"No items downloaded for subset {subset}! Cannot train model with empty subset.\n"
+                                 f"Subset {subset} filters: {filters.prepare()}\nAnnotation filters: {annotation_filters.prepare()}")

         self.convert_from_dtlpy(data_path=data_path, **kwargs)
         return root_path, data_path, output_path

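The new check makes the adapter fail fast when a subset's filters match no items. A hedged pre-flight check one could run before training, using the same `filters.prepare()` call the error message relies on; the filter definition and variable names are illustrative, not taken from the adapter:

```python
import dtlpy as dl

# Illustrative subset filter; real subsets are defined on the model configuration.
train_filters = dl.Filters(field='dir', values='/train')

# `dataset` is assumed to have been fetched beforehand.
pages = dataset.items.list(filters=train_filters)
if pages.items_count == 0:
    # This is the situation the adapter now rejects with ValueError.
    print('Empty subset, filters were:', train_filters.prepare())
```
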
dtlpy/repositories/datasets.py
CHANGED
@@ -128,7 +128,7 @@ class Datasets:

     @staticmethod
     def _build_payload(filters, include_feature_vectors, include_annotations,
-                       export_type, annotation_filters, feature_vector_filters, dataset_lock):
+                       export_type, annotation_filters, feature_vector_filters, dataset_lock, lock_timeout_sec):
         valid_list = [e.value for e in entities.ExportType]
         valid_types = ', '.join(valid_list)
         if export_type not in ['json', 'zip']:
@@ -157,9 +157,13 @@ class Datasets:
         if annotation_filters is not None:
             payload['annotationsQuery'] = annotation_filters.prepare()['filter']
             payload['annotations']['filter'] = True
+
         if dataset_lock:
             payload['datasetLock'] = dataset_lock

+        if lock_timeout_sec:
+            payload['lockTimeoutSec'] = lock_timeout_sec
+
         return payload

     def _download_exported_item(self, item_id, export_type, local_path=None):
@@ -631,7 +635,8 @@ class Datasets:
                include_annotations: bool = False,
                export_type: entities.ExportType = entities.ExportType.JSON,
                timeout: int = 0,
-               dataset_lock: bool = False
+               dataset_lock: bool = False,
+               lock_timeout_sec: int = None):
         """
         Export dataset items and annotations.

@@ -649,6 +654,7 @@ class Datasets:
         :param bool include_feature_vectors: Include item feature vectors in the export
         :param bool include_annotations: Include item annotations in the export
         :param bool dataset_lock: Make dataset readonly during the export
+        :param int lock_timeout_sec: Timeout for locking the dataset during export in seconds
         :param entities.ExportType export_type: Type of export ('json' or 'zip')
         :param int timeout: Maximum time in seconds to wait for the export to complete
         :return: Exported item
@@ -663,11 +669,12 @@ class Datasets:
                            include_feature_vectors=True,
                            include_annotations=True,
                            export_type=dl.ExportType.JSON,
-                           dataset_lock=True
+                           dataset_lock=True
+                           lock_timeout_sec=300)
         """
         dataset_id = self._resolve_dataset_id(dataset, dataset_name, dataset_id)
         payload = self._build_payload(filters, include_feature_vectors, include_annotations,
-                                      export_type, annotation_filters, feature_vector_filters, dataset_lock)
+                                      export_type, annotation_filters, feature_vector_filters, dataset_lock, lock_timeout_sec)

         success, response = self._client_api.gen_request(req_type='post', path=f'/datasets/{dataset_id}/export',
                                                          json_req=payload)
@@ -932,7 +939,8 @@ class Datasets:
                             filter_output_annotations: bool = False,
                             alpha: float = None,
                             export_version=entities.ExportVersion.V1,
-                            dataset_lock: bool = False
+                            dataset_lock: bool = False,
+                            lock_timeout_sec: int = None
                             ) -> str:
         """
         Download dataset's annotations by filters.
@@ -960,6 +968,7 @@ class Datasets:
         :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
         :return: local_path of the directory where all the downloaded item
         :param bool dataset_lock: optional - default = False
+        :param int lock_timeout_sec: optional
         :rtype: str

         **Example**:
@@ -973,7 +982,8 @@ class Datasets:
                                           thickness=1,
                                           with_text=False,
                                           alpha=1,
-                                          dataset_lock=False
+                                          dataset_lock=False
+                                          lock_timeout_sec=300
                                           )
         """
         if annotation_options is None:
@@ -1034,7 +1044,8 @@ class Datasets:
                                           export_png_files=export_png_files,
                                           filter_output_annotations=filter_output_annotations,
                                           export_version=export_version,
-                                          dataset_lock=dataset_lock
+                                          dataset_lock=dataset_lock,
+                                          lock_timeout_sec=lock_timeout_sec
                                           )
         if annotation_options:
             pages = dataset.items.list(filters=filters)

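On the wire, the two arguments become optional keys of the export payload, exactly as `_build_payload` shows above. A condensed sketch of just that mapping; everything else the payload contains is omitted here rather than guessed:

```python
# Mirrors the _build_payload snippet in this diff: each key is only added when
# its Python argument is truthy, so dataset_lock=False or lock_timeout_sec=0
# is simply not sent.
payload = {}
dataset_lock, lock_timeout_sec = True, 300

if dataset_lock:
    payload['datasetLock'] = dataset_lock
if lock_timeout_sec:
    payload['lockTimeoutSec'] = lock_timeout_sec

# payload is then included in the JSON body of POST /datasets/{dataset_id}/export.
```
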
dtlpy/repositories/downloader.py
CHANGED
@@ -47,7 +47,8 @@ class Downloader:
                  filter_output_annotations=False,
                  alpha=1,
                  export_version=entities.ExportVersion.V1,
-                 dataset_lock=False
+                 dataset_lock=False,
+                 lock_timeout_sec=None
                  ):
         """
         Download dataset by filters.
@@ -74,6 +75,7 @@ class Downloader:
         :param alpha: opacity value [0 1], default 1
         :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
         :param bool dataset_lock: optional - default = False
+        :param int lock_timeout_sec: optional
         :return: Output (list)
         """

@@ -198,7 +200,8 @@ class Downloader:
             'export_png_files': export_png_files,
             'filter_output_annotations': filter_output_annotations,
             'export_version': export_version,
-            'dataset_lock': dataset_lock
+            'dataset_lock': dataset_lock,
+            'lock_timeout_sec': lock_timeout_sec
         })
         ###############
         # downloading #
@@ -365,7 +368,8 @@ class Downloader:
                              export_png_files=False,
                              filter_output_annotations=False,
                              export_version=entities.ExportVersion.V1,
-                             dataset_lock=False
+                             dataset_lock=False,
+                             lock_timeout_sec=None
                              ):
         """
         Download annotations json for entire dataset
@@ -380,6 +384,7 @@ class Downloader:
         :param filter_output_annotations: default - False, given an export by filter - determine if to filter out annotations
         :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
         :param bool dataset_lock: optional - default = False
+        :param int lock_timeout_sec: optional
         :return:
         """
         local_path = os.path.join(local_path, "json")
@@ -404,6 +409,9 @@ class Downloader:
             payload['annotations']['filter'] = filter_output_annotations
         if dataset_lock:
             payload['datasetLock'] = dataset_lock
+
+        if lock_timeout_sec:
+            payload['lockTimeoutSec'] = lock_timeout_sec

         success, response = dataset._client_api.gen_request(req_type='post',
                                                             path='/datasets/{}/export'.format(dataset.id),
@@ -737,19 +745,15 @@ class Downloader:
                                     one_file_pbar.update(len(chunk))
                         except Exception as err:
                             pass
-
-
-
+
+                        file_validation = True
+                        if not is_url:
+                            file_validation, start_point, chunk_resume = self.__get_next_chunk(item=item,
+                                                                                                download_progress=temp_file_path,
+                                                                                                chunk_resume=chunk_resume)
                         if file_validation:
                             shutil.move(temp_file_path, local_filepath)
                             download_done = True
-                        else:
-                            if not is_url:
-                                continue
-                            else:
-                                raise PlatformException(
-                                    error="400",
-                                    message='Downloaded file is corrupted. Please try again. If the issue repeats please contact support.')
             except Exception as err:
                 if os.path.isfile(temp_file_path):
                     os.remove(temp_file_path)

dtlpy/repositories/integrations.py
CHANGED
@@ -55,12 +55,15 @@ class Integrations:
     def delete(self,
                integrations_id: str,
                sure: bool = False,
-               really: bool = False
+               really: bool = False,
+               organization_id: str = None
+               ) -> bool:
         """
         Delete integrations from the organization.

         **Prerequisites**: You must be an organization *owner* to delete an integration.

+        :param organization_id: organization id
         :param str integrations_id: integrations id
         :param bool sure: Are you sure you want to delete?
         :param bool really: Really really sure?
@@ -74,11 +77,12 @@ class Integrations:
             project.integrations.delete(integrations_id='integrations_id', sure=True, really=True)
         """
         if sure and really:
-            if self.project is None and self.org is None:
+            if self.project is None and self.org is None and organization_id is None:
                 raise exceptions.PlatformException(
                     error='400',
                     message='Must provide an identifier in inputs')

+            if organization_id is None:
                 if self.project is not None:
                     organization_id = self.project.org.get('id')
                 else:
@@ -101,7 +105,9 @@ class Integrations:
                integrations_type: entities.IntegrationType,
                name: str,
                options: dict,
-               metadata: dict = None
+               metadata: dict = None,
+               organization_id: str = None,
+               ):
         """
         Create an integration between an external storage and the organization.

@@ -123,6 +129,7 @@ class Integrations:
         :param str name: integrations name
         :param dict options: dict of storage secrets
         :param dict metadata: metadata
+        :param str organization_id: organization id
         :return: success
         :rtype: bool

@@ -135,15 +142,16 @@ class Integrations:
                                        options={key: "Access key ID", secret: "Secret access key"})
         """

-        if self.project is None and self.org is None:
+        if self.project is None and self.org is None and organization_id is None:
             raise exceptions.PlatformException(
                 error='400',
                 message='Must have an organization or project')

-        if self.project is not None:
-            organization_id = self.project.org.get('id')
-        else:
-            organization_id = self.org.id
+        if organization_id is None:
+            if self.project is not None:
+                organization_id = self.project.org.get('id')
+            else:
+                organization_id = self.org.id

         url_path = '/orgs/{}/integrations'.format(organization_id)
         payload = {"type": integrations_type.value if isinstance(integrations_type,
@@ -170,7 +178,9 @@ class Integrations:
                new_name: str = None,
                integrations_id: str = None,
                integration: entities.Integration = None,
-               new_options: dict = None
+               new_options: dict = None,
+               organization_id: str = None,
+               ):
         """
         Update the integration's name.

@@ -180,6 +190,7 @@ class Integrations:
         :param str integrations_id: integrations id
         :param Integration integration: integration object
         :param dict new_options: new value
+        :param str organization_id: organization id
         :return: Integration object
         :rtype: dtlpy.entities.integration.Integration

@@ -198,7 +209,8 @@ class Integrations:

             project.integrations.update(integrations_id='integrations_id', new_options={roleArn: ""})
         """
-        if self.project is None and self.org is None:
+
+        if self.project is None and self.org is None and organization_id is None:
             raise exceptions.PlatformException(
                 error='400',
                 message='Must have an organization or project')
@@ -207,10 +219,11 @@ class Integrations:
                 error='400',
                 message='Must have an integrations_id or integration')

-        if self.project is not None:
-            organization_id = self.project.org.get('id')
-        else:
-            organization_id = self.org.id
+        if organization_id is None:
+            if self.project is not None:
+                organization_id = self.project.org.get('id')
+            else:
+                organization_id = self.org.id

         url_path = '/orgs/{}/integrations/'.format(organization_id)
         payload = dict(integrationId=integrations_id if integrations_id is not None else integration.id)
@@ -230,13 +243,14 @@ class Integrations:
         return entities.Integration.from_json(_json=response.json(), client_api=self._client_api)

     @_api_reference.add(path='/orgs/{orgId}/integrations/{integrationId}', method='get')
-    def get(self, integrations_id: str):
+    def get(self, integrations_id: str, organization_id: str = None):
         """
         Get organization integrations. Use this method to access your integration and be able to use it in your code.

         **Prerequisites**: You must be an *owner* in the organization.

         :param str integrations_id: integrations id
+        :param str organization_id: organization id
         :return: Integration object
         :rtype: dtlpy.entities.integration.Integration

@@ -246,15 +260,16 @@ class Integrations:

             project.integrations.get(integrations_id='integrations_id')
         """
-        if self.project is None and self.org is None:
+        if self.project is None and self.org is None and organization_id is None:
             raise exceptions.PlatformException(
                 error='400',
                 message='Must have an organization or project')

-        if self.project is not None:
-            organization_id = self.project.org.get('id')
-        else:
-            organization_id = self.org.id
+        if organization_id is None:
+            if self.project is not None:
+                organization_id = self.project.org.get('id')
+            else:
+                organization_id = self.org.id

         url_path = '/orgs/{}/integrations/{}'.format(organization_id, integrations_id)

@@ -265,13 +280,14 @@ class Integrations:
         return entities.Integration.from_json(_json=response.json(), client_api=self._client_api)

     @_api_reference.add(path='/orgs/{orgId}/integrations', method='get')
-    def list(self, only_available=False):
+    def list(self, only_available=False, organization_id: str = None):
         """
         List all the organization's integrations with external storage.

         **Prerequisites**: You must be an *owner* in the organization.

         :param bool only_available: if True list only the available integrations.
+        :param str organization_id: organization id
         :return: groups list
         :rtype: list

@@ -281,15 +297,16 @@ class Integrations:

             project.integrations.list(only_available=True)
         """
-        if self.project is None and self.org is None:
+        if self.project is None and self.org is None and organization_id is None:
             raise exceptions.PlatformException(
                 error='400',
                 message='Must have an organization or project')

-        if self.project is not None:
-            organization_id = self.project.org.get('id')
-        else:
-            organization_id = self.org.id
+        if organization_id is None:
+            if self.project is not None:
+                organization_id = self.project.org.get('id')
+            else:
+                organization_id = self.org.id

         if only_available:
             url_path = '/orgs/{}/availableIntegrations'.format(organization_id)

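Previously these calls required the repository to be bound to a project or organization; the new optional `organization_id` lets the caller name the organization explicitly, and an explicit value takes precedence over the one inferred from `self.project`/`self.org`. A hedged sketch using a project-bound repository, as the docstring examples in this file do; the ids are placeholders:

```python
import dtlpy as dl

project = dl.projects.get(project_name='<project-name>')
org_id = '<organization-id>'

# An explicit organization_id overrides the organization derived from the project:
integration = project.integrations.get(integrations_id='<integration-id>',
                                        organization_id=org_id)
available = project.integrations.list(only_available=True, organization_id=org_id)
project.integrations.delete(integrations_id='<integration-id>',
                            sure=True, really=True,
                            organization_id=org_id)
```
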
dtlpy/repositories/items.py
CHANGED
@@ -528,7 +528,8 @@ class Items:
                  filter_output_annotations: bool = False,
                  alpha: float = 1,
                  export_version=entities.ExportVersion.V1,
-                 dataset_lock: bool = False
+                 dataset_lock: bool = False,
+                 lock_timeout_sec: int = None
                  ):
         """
         Download dataset items by filters.
@@ -549,6 +550,7 @@ class Items:
         :param dtlpy.entities.filters.Filters annotation_filters: Filters entity to filter annotations for download
         :param bool overwrite: optional - default = False
         :param bool dataset_lock: optional - default = False
+        :param int lock_timeout_sec: optional
         :param bool to_items_folder: Create 'items' folder and download items to it
         :param int thickness: optional - line thickness, if -1 annotation will be filled, default =1
         :param bool with_text: optional - add text to annotations, default = False
@@ -596,7 +598,8 @@ class Items:
                                      export_png_files=export_png_files,
                                      filter_output_annotations=filter_output_annotations,
                                      export_version=export_version,
-                                     dataset_lock=dataset_lock
+                                     dataset_lock=dataset_lock,
+                                     lock_timeout_sec=lock_timeout_sec
                                      )

     def upload(

dtlpy/repositories/tasks.py
CHANGED
@@ -2,6 +2,7 @@ import datetime
 import logging
 import json
 from typing import Union, List
+import warnings

 from .. import exceptions, miscellaneous, entities, repositories, _api_reference
 from ..services.api_client import ApiClient
@@ -426,7 +427,7 @@ class Tasks:
         **Prerequisites**: You must be in the role of an *owner* or *developer* or *annotation manager* who created that task.

         :param dtlpy.entities.task.Task task: the task object
-        :param bool system_metadata:
+        :param bool system_metadata: DEPRECATED
         :return: Task object
         :rtype: dtlpy.entities.task.Task

@@ -440,7 +441,7 @@ class Tasks:
         url = '{}/{}'.format(url, task.id)

         if system_metadata:
-
+            warnings.warn("Task system metadata updates are not permitted. Please store custom metadata in 'task.metadata['user']' instead.", DeprecationWarning)

         success, response = self._client_api.gen_request(req_type='patch',
                                                          path=url,

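With this change, passing `system_metadata=True` to a task update only emits the DeprecationWarning shown above instead of switching the request into a system-metadata mode. A short sketch of the pattern the warning message itself points to; the key and value under `'user'` are illustrative:

```python
# `task` is an existing dtlpy.entities.Task, e.g. fetched via project.tasks.get(...).
task.metadata = task.metadata or {}
task.metadata['user'] = {'review_round': 2}   # illustrative custom metadata
task = task.update()                          # system_metadata=True would only warn
```
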
{dtlpy-1.106.5.dist-info → dtlpy-1.107.8.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 dtlpy/__init__.py,sha256=K2c30sbTNH6bdPFLjabRX-Dh3TsQ33nR9psVGCAoUlw,20687
-dtlpy/__version__.py,sha256=
+dtlpy/__version__.py,sha256=DyXSqQ9xDLitJutTU-b8vJ4YgdU54rdp_AMOoLtwOKE,20
 dtlpy/exceptions.py,sha256=EQCKs3pwhwZhgMByQN3D3LpWpdxwcKPEEt-bIaDwURM,2871
 dtlpy/new_instance.py,sha256=bHsWS-nNcQ5XWDcC4uASqOqpl1ye97kUacduxXZHO68,9990
 dtlpy/assets/__init__.py,sha256=D_hAa6NM8Zoy32sF_9b7m0b7I-BQEyBFg8-9Tg2WOeo,976
@@ -57,7 +57,7 @@ dtlpy/entities/codebase.py,sha256=pwRkAq2GV0wvmzshg89IAmE-0I2Wsy_-QNOu8OV8uqc,89
 dtlpy/entities/collection.py,sha256=FPPPfIxOsBG1ujORPJVq8uXyF8vhIqC6N4EiI9SJzl0,1160
 dtlpy/entities/command.py,sha256=FtfsO6kQSZqKn-Uo8n2ryGOB01Fgr-g5ewfMCtRMTfw,5247
 dtlpy/entities/compute.py,sha256=6mN4ZRUTf28wc0YtFy7W4mONaoZI0DyIGvwosjQrsAA,14663
-dtlpy/entities/dataset.py,sha256=
+dtlpy/entities/dataset.py,sha256=4zFftsZy7SCLZriQyNa0bVbwg3LcwLlzAbk4fMT1vHg,52943
 dtlpy/entities/directory_tree.py,sha256=Rni6pLSWytR6yeUPgEdCCRfTg_cqLOdUc9uCqz9KT-Q,1186
 dtlpy/entities/dpk.py,sha256=FJVhQKk2fj1cO_4rcE_bIF6QmIQZQWUkBnwTNQNMrfE,17857
 dtlpy/entities/driver.py,sha256=O_QdK1EaLjQyQkmvKsmkNgmvmMb1mPjKnJGxK43KrOA,7197
@@ -67,7 +67,7 @@ dtlpy/entities/feature_set.py,sha256=niw4MkmrDbD_LWQu1X30uE6U4DCzmFhPTaYeZ6VZDB0
 dtlpy/entities/filters.py,sha256=PUmgor77m3CWeUgvCdWMg3Bt5SxHXPVBbN5VmD_dglQ,22683
 dtlpy/entities/gis_item.py,sha256=Uk-wMBxwcHsImjz4qOjP-EyZAohbRzN43kMpCaVjCXU,3982
 dtlpy/entities/integration.py,sha256=Kdy1j6-cJLW8qNmnqCmdg36phi843YDrlMqcMyMfvYk,5875
-dtlpy/entities/item.py,sha256=
+dtlpy/entities/item.py,sha256=vMMrUxdopp94cpNicCS4kTTUBP9eYwNilb1XhG8ZIeE,34839
 dtlpy/entities/label.py,sha256=ycDYavIgKhz806plIX-64c07_TeHpDa-V7LnfFVe4Rg,3869
 dtlpy/entities/links.py,sha256=FAmEwHtsrqKet3c0UHH9u_gHgG6_OwF1-rl4xK7guME,2516
 dtlpy/entities/message.py,sha256=ApJuaKEqxATpXjNYUjGdYPu3ibQzEMo8-LtJ_4xAcPI,5865
@@ -90,7 +90,7 @@ dtlpy/entities/reflect_dict.py,sha256=2NaSAL-CO0T0FYRYFQlaSpbsoLT2Q18AqdHgQSLX5Y
 dtlpy/entities/resource_execution.py,sha256=1HuVV__U4jAUOtOkWlWImnM3Yts8qxMSAkMA9sBhArY,5033
 dtlpy/entities/service.py,sha256=X4rukxywZvb69swxj2C12i7HdmQ_XFuCRKV8Cdl_Dbw,33542
 dtlpy/entities/setting.py,sha256=uXagJHtcCR3nJYClR_AUGZjz_kx3TejPcUZ8ginHFIA,8561
-dtlpy/entities/task.py,sha256=
+dtlpy/entities/task.py,sha256=WOKrZBOu9BaJopZBToFEFOQsEntsfvzKolJYPEUjIeI,19511
 dtlpy/entities/time_series.py,sha256=336jWNckjuSn0G29WJFetB7nBoFAKqs4VH9_IB4m4FE,4017
 dtlpy/entities/trigger.py,sha256=Spf5G3n1PsD3mDntwbAsc-DpEGDlqKgU9ec0Q0HinsQ,14286
 dtlpy/entities/user.py,sha256=hqEzwN6rl1oUTpKOV5eXvw9Z7dtpsiC4TAPSNBmkqcM,3865
@@ -148,7 +148,7 @@ dtlpy/miscellaneous/list_print.py,sha256=leEg3RodgYfH5t_0JG8VuM8NiesR8sJLK_mRStt
 dtlpy/miscellaneous/zipping.py,sha256=GMdPhAeHQXeMS5ClaiKWMJWVYQLBLAaJUWxvdYrL4Ro,5337
 dtlpy/ml/__init__.py,sha256=vPkyXpc9kcWWZ_PxyPEOsjKBJdEbowLkZr8FZIb_OBM,799
 dtlpy/ml/base_feature_extractor_adapter.py,sha256=iiEGYAx0Rdn4K46H_FlKrAv3ebTXHSxNVAmio0BxhaI,1178
-dtlpy/ml/base_model_adapter.py,sha256=
+dtlpy/ml/base_model_adapter.py,sha256=WKpGO5-kLISwXmPy4Dc7e-nIGn3W-tbqJDnhE49WMKA,50930
 dtlpy/ml/metrics.py,sha256=BG2E-1Mvjv2e2No9mIJKVmvzqBvLqytKcw3hA7wVUNc,20037
 dtlpy/ml/predictions_utils.py,sha256=He_84U14oS2Ss7T_-Zj5GDiBZwS-GjMPURUh7u7DjF8,12484
 dtlpy/ml/summary_writer.py,sha256=dehDi8zmGC1sAGyy_3cpSWGXoGQSiQd7bL_Thoo8yIs,2784
@@ -165,15 +165,15 @@ dtlpy/repositories/collections.py,sha256=C_BPMg128Sl9AG3U4PxgI_2aaehQ2NuehMmzoTa
 dtlpy/repositories/commands.py,sha256=i6gQgOmRDG8ixqKU7672H3CvGt8VLT3ihDVfri1eWWc,5610
 dtlpy/repositories/compositions.py,sha256=H417BvlQAiWr5NH2eANFke6CfEO5o7DSvapYpf7v5Hk,2150
 dtlpy/repositories/computes.py,sha256=l0-FS3_8WEGG5tbtIR3ltsZc6MyHVkiYajHTCaeUugk,10156
-dtlpy/repositories/datasets.py,sha256=
-dtlpy/repositories/downloader.py,sha256=
+dtlpy/repositories/datasets.py,sha256=g0ii254YeAjA7B7MWhZoFsB03d70HiskCcPNkbfjC08,58762
+dtlpy/repositories/downloader.py,sha256=p4XXmH8cjYN8o0FJt81S9VqtnOyqj3YEfZKP-l6KGEM,44651
 dtlpy/repositories/dpks.py,sha256=dglvaiSFBvEithhlQ0RAXwzTxoZaICONs-owx3e2nfU,17848
 dtlpy/repositories/drivers.py,sha256=fF0UuHCyBzop8pHfryex23mf0kVFAkqzNdOmwBbaWxY,10204
 dtlpy/repositories/executions.py,sha256=4UoU6bnB3kl5cMuF1eJvDecfZCaB06gKWxPfv6_g1_k,32598
 dtlpy/repositories/feature_sets.py,sha256=UowMDAl_CRefRB5oZzubnsjU_OFgiPPdQXn8q2j4Kuw,9666
 dtlpy/repositories/features.py,sha256=A_RqTJxzjTh-Wbm0uXaoTNyHSfCLbeiH38iB11p2ifY,9915
-dtlpy/repositories/integrations.py,sha256=
-dtlpy/repositories/items.py,sha256=
+dtlpy/repositories/integrations.py,sha256=gSgaVp4MkcdrJMnXVr_fl4xrzhfJba8BFbBJTuJPwXc,18159
+dtlpy/repositories/items.py,sha256=uOukC5sCvdzdIGglYRfYw-1tS11Q9HH4oZFzORnpmk4,39881
 dtlpy/repositories/messages.py,sha256=QU0Psckg6CA_Tlw9AVxqa-Ay1fRM4n269sSIJkH9o7E,3066
 dtlpy/repositories/models.py,sha256=IekNMcnuKVaAVTJf2AJv6YvX5qCd9kkSl4ETPMWP4Zc,38213
 dtlpy/repositories/nodes.py,sha256=xXJm_YA0vDUn0dVvaGeq6ORM0vI3YXvfjuylvGRtkxo,3061
@@ -188,7 +188,7 @@ dtlpy/repositories/resource_executions.py,sha256=PyzsbdJxz6jf17Gx13GZmqdu6tZo3TT
 dtlpy/repositories/schema.py,sha256=kTKDrbwm7BfQnBAK81LpAl9ChNFdyUweSLNazlJJhjk,3953
 dtlpy/repositories/services.py,sha256=2ruoPwyznRwsNtM7YK2vSGQP9jtCHB6WitRo-Z0yB_c,68576
 dtlpy/repositories/settings.py,sha256=HHYSGub5Y6cQ746pBfvlQndsgBj1UoNFupa2otgvsWI,11645
-dtlpy/repositories/tasks.py,sha256=
+dtlpy/repositories/tasks.py,sha256=sBV7SLLwt2QsJkjdEuKLJgIPS34H1b5E2rdFQb1n1Wo,50160
 dtlpy/repositories/times_series.py,sha256=m-bKFEgiZ13yQNelDjBfeXMUy_HgsPD_JAHj1GVx9fU,11420
 dtlpy/repositories/triggers.py,sha256=izdNyCN1gDc5uo7AXntso0HSMTDIzGFUp-dSEz8cn_U,21990
 dtlpy/repositories/upload_element.py,sha256=R2KWIXmkp_dMAIr81tu3Y_VRfldj0ju8__V28ombkcg,10677
@@ -224,9 +224,9 @@ dtlpy/utilities/reports/report.py,sha256=3nEsNnIWmdPEsd21nN8vMMgaZVcPKn9iawKTTeO
 dtlpy/utilities/videos/__init__.py,sha256=SV3w51vfPuGBxaMeNemx6qEMHw_C4lLpWNGXMvdsKSY,734
 dtlpy/utilities/videos/video_player.py,sha256=LCxg0EZ_DeuwcT7U_r7MRC6Q19s0xdFb7x5Gk39PRms,24072
 dtlpy/utilities/videos/videos.py,sha256=Dj916B4TQRIhI7HZVevl3foFrCsPp0eeWwvGbgX3-_A,21875
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
+dtlpy-1.107.8.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
+dtlpy-1.107.8.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
+dtlpy-1.107.8.data/scripts/dlp.py,sha256=tEokRaDINISXnq8yNx_CBw1qM5uwjYiZoJOYGqWB3RU,4267
 tests/assets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/assets/models_flow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/assets/models_flow/failedmain.py,sha256=n8F4eu_u7JPrJ1zedbJPvv9e3lHb3ihoErqrBIcseEc,1847
@@ -234,9 +234,9 @@ tests/assets/models_flow/main.py,sha256=vnDKyVZaae2RFpvwS22Hzi6Dt2LJerH4yQrmKtaT
 tests/assets/models_flow/main_model.py,sha256=Hl_tv7Q6KaRL3yLkpUoLMRqu5-ab1QsUYPL6RPEoamw,2042
 tests/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/features/environment.py,sha256=TMeUzSZkksHqbxNBDLk-LYBMD4G5dMo4ZLZXPwQImVE,18751
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
+dtlpy-1.107.8.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+dtlpy-1.107.8.dist-info/METADATA,sha256=VTZNTqnNeuyZQM3xubZ68omm1J5fBp-e13CS7GRjjVA,3019
+dtlpy-1.107.8.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+dtlpy-1.107.8.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
+dtlpy-1.107.8.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
+dtlpy-1.107.8.dist-info/RECORD,,