supervisely 6.73.301__py3-none-any.whl → 6.73.303__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of supervisely might be problematic.
- supervisely/api/task_api.py +289 -5
- supervisely/convert/__init__.py +19 -3
- supervisely/convert/image/coco/coco_helper.py +177 -27
- supervisely/convert/image/pascal_voc/pascal_voc_helper.py +96 -12
- supervisely/convert/image/yolo/yolo_helper.py +84 -1
- supervisely/nn/artifacts/artifacts.py +133 -62
- supervisely/nn/artifacts/detectron2.py +6 -0
- supervisely/nn/artifacts/hrda.py +4 -0
- supervisely/nn/artifacts/mmclassification.py +4 -0
- supervisely/nn/artifacts/mmdetection.py +9 -1
- supervisely/nn/artifacts/mmsegmentation.py +4 -0
- supervisely/nn/artifacts/ritm.py +4 -0
- supervisely/nn/artifacts/rtdetr.py +4 -0
- supervisely/nn/artifacts/unet.py +4 -0
- supervisely/nn/artifacts/yolov5.py +7 -0
- supervisely/nn/artifacts/yolov8.py +5 -1
- supervisely/nn/experiments.py +85 -2
- supervisely/nn/inference/inference.py +11 -4
- supervisely/nn/training/train_app.py +1 -1
- {supervisely-6.73.301.dist-info → supervisely-6.73.303.dist-info}/METADATA +1 -1
- {supervisely-6.73.301.dist-info → supervisely-6.73.303.dist-info}/RECORD +25 -25
- {supervisely-6.73.301.dist-info → supervisely-6.73.303.dist-info}/LICENSE +0 -0
- {supervisely-6.73.301.dist-info → supervisely-6.73.303.dist-info}/WHEEL +0 -0
- {supervisely-6.73.301.dist-info → supervisely-6.73.303.dist-info}/entry_points.txt +0 -0
- {supervisely-6.73.301.dist-info → supervisely-6.73.303.dist-info}/top_level.txt +0 -0
supervisely/convert/image/pascal_voc/pascal_voc_helper.py
CHANGED

@@ -7,17 +7,11 @@ import numpy as np
 from PIL import Image
 from tqdm import tqdm
 
-from supervisely import (
-
-
-
-
-    ObjClassCollection,
-    Project,
-    ProjectMeta,
-    generate_free_name,
-    logger,
-)
+from supervisely._utils import generate_free_name
+from supervisely.annotation.annotation import Annotation
+from supervisely.annotation.label import Label
+from supervisely.annotation.obj_class import ObjClass
+from supervisely.annotation.obj_class_collection import ObjClassCollection
 from supervisely.convert.image.image_helper import validate_image_bounds
 from supervisely.geometry.bitmap import Bitmap
 from supervisely.geometry.polygon import Polygon

@@ -26,6 +20,9 @@ from supervisely.imaging.color import generate_rgb
 from supervisely.imaging.image import read
 from supervisely.io.fs import file_exists, get_file_ext, get_file_name
 from supervisely.io.json import load_json_file
+from supervisely.project.project import Dataset, OpenMode, Project
+from supervisely.project.project_meta import ProjectMeta
+from supervisely.sly_logger import logger
 from supervisely.task.progress import tqdm_sly
 
 MASKS_EXTENSION = ".png"
@@ -374,7 +371,7 @@ def sly_ds_to_pascal_voc(
     train_val_split_coef: float = 0.8,
     log_progress: bool = False,
     progress_cb: Optional[Union[tqdm, Callable]] = None,
-) ->
+) -> None:
     """
     Convert Supervisely dataset to Pascal VOC format.
 
@@ -612,6 +609,9 @@ def sly_project_to_pascal_voc(
         # Convert Project to Pascal VOC format
         sly.Project(project_directory).to_pascal_voc(log_progress=True)
     """
+    if isinstance(project, str):
+        project = Project(project, mode=OpenMode.READ)
+
     if dest_dir is None:
         dest_dir = project.directory
 
@@ -636,3 +636,87 @@ def sly_project_to_pascal_voc(
         )
         logger.info(f"Dataset '{dataset.short_name}' has been converted to Pascal VOC format.")
     logger.info(f"Project '{project.name}' has been converted to Pascal VOC format.")
+
+
+def to_pascal_voc(
+    input_data: Union[Project, Dataset, str],
+    dest_dir: Optional[str] = None,
+    meta: Optional[ProjectMeta] = None,
+    train_val_split_coef: float = 0.8,
+    log_progress: bool = True,
+    progress_cb: Optional[Union[tqdm, Callable]] = None,
+) -> None:
+    """
+    Universal function to convert a Supervisely project or dataset to Pascal VOC format.
+
+    Note:
+        - For better compatibility, please pass named arguments explicitly; otherwise, the function may not work as expected.
+          You can use the dedicated functions for each data type:
+
+          - :func:`sly.convert.sly_project_to_pascal_voc()`
+          - :func:`sly.convert.sly_ds_to_pascal_voc()`
+
+        - If input_data is a Project, the dest_dir parameter is required.
+        - If input_data is a Dataset, the meta and dest_dir parameters are required.
+
+    :param input_data: Input data to convert (Project, Dataset, or path to the project/dataset directory).
+    :type input_data: :class:`Project<supervisely.project.project.Project>`, :class:`Dataset<supervisely.project.project.Dataset>`, or :class:`str`
+    :param dest_dir: Destination directory.
+    :type dest_dir: :class:`str`, optional
+    :param meta: Project meta information (required for Dataset conversion).
+    :type meta: :class:`ProjectMeta<supervisely.project.project_meta.ProjectMeta>`, optional
+    :param train_val_split_coef: Coefficient for splitting images into train and validation sets.
+    :type train_val_split_coef: :class:`float`, optional
+    :param log_progress: Show conversion progress bar.
+    :type log_progress: :class:`bool`
+    :param progress_cb: Function for tracking conversion progress (for all items in the project).
+    :type progress_cb: callable, optional
+    :return: None
+    :rtype: NoneType
+
+    :Usage example:
+
+     .. code-block:: python
+
+        import supervisely as sly
+
+        # Local folder with Project
+        project_directory = "/home/admin/work/supervisely/source/project"
+        project_fs = sly.Project(project_directory, sly.OpenMode.READ)
+
+        # Convert Project to Pascal VOC format
+        sly.convert.to_pascal_voc(project_directory, dest_dir="./pascal_voc")
+        # or
+        sly.convert.to_pascal_voc(project_fs, dest_dir="./pascal_voc")
+
+        # Convert Dataset to Pascal VOC format
+        dataset: sly.Dataset = project_fs.datasets.get("dataset_name")
+        sly.convert.to_pascal_voc(dataset, dest_dir="./pascal_voc", meta=project_fs.meta)
+    """
+    if isinstance(input_data, str):
+        try:
+            input_data = Project(input_data, mode=OpenMode.READ)
+        except Exception:
+            try:
+                input_data = Dataset(input_data, mode=OpenMode.READ)
+            except Exception:
+                raise ValueError("Please check the path or the input data.")
+
+    if isinstance(input_data, (Project, str)):
+        return sly_project_to_pascal_voc(
+            project=input_data,
+            dest_dir=dest_dir,
+            train_val_split_coef=train_val_split_coef,
+            log_progress=log_progress,
+            progress_cb=progress_cb,
+        )
+    elif isinstance(input_data, Dataset):
+        return sly_ds_to_pascal_voc(
+            dataset=input_data,
+            meta=meta,
+            dest_dir=dest_dir,
+            train_val_split_coef=train_val_split_coef,
+            log_progress=log_progress,
+            progress_cb=progress_cb,
+        )
+    else:
+        raise ValueError(f"Unsupported input data type: {type(input_data)}")
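The dispatch above accepts a Project, a Dataset, or a plain path; for a path it tries to open a Project first and falls back to a Dataset. A minimal sketch of the Dataset branch (the local path and dataset name are illustrative; per the docstring's note, meta and dest_dir must be supplied for Dataset inputs):

    import supervisely as sly

    project_fs = sly.Project("./sly_project", sly.OpenMode.READ)
    dataset = project_fs.datasets.get("ds0")  # illustrative dataset name

    # A Dataset alone does not carry class definitions, so the project
    # meta has to be passed explicitly.
    sly.convert.to_pascal_voc(
        dataset,
        dest_dir="./pascal_voc",
        meta=project_fs.meta,
        train_val_split_coef=0.8,
    )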
supervisely/convert/image/yolo/yolo_helper.py
CHANGED

@@ -474,7 +474,7 @@ def label_to_yolo_lines(
             max_kpts_count=max_kpts_count,
         )
     else:
-        raise ValueError(f"Unsupported
+        raise ValueError(f"Unsupported task type: {task_type}")
 
     if yolo_line is not None:
         lines.append(yolo_line)
@@ -657,3 +657,86 @@ def sly_project_to_yolo(
         )
         logger.info(f"Dataset '{dataset.short_name}' has been converted to YOLO format.")
     logger.info(f"Project '{project.name}' has been converted to YOLO format.")
+
+
+def to_yolo(
+    input_data: Union[Project, Dataset, str],
+    dest_dir: Optional[str] = None,
+    task_type: Literal["detection", "segmentation", "pose"] = "detection",
+    meta: Optional[ProjectMeta] = None,
+    log_progress: bool = True,
+    progress_cb: Optional[Callable] = None,
+) -> Union[None, str]:
+    """
+    Universal function to convert a Supervisely project or dataset to YOLO format.
+
+    Note:
+        - For better compatibility, please pass named arguments explicitly; otherwise, the function may not work as expected.
+          You can use the dedicated functions for each data type:
+
+          - :func:`sly.convert.sly_project_to_yolo()`
+          - :func:`sly.convert.sly_ds_to_yolo()`
+
+        - If input_data is a Project, the dest_dir parameter is required.
+        - If input_data is a Dataset, the meta and dest_dir parameters are required.
+
+    :param input_data: Supervisely project or dataset, or path to the directory with the project/dataset.
+    :type input_data: :class:`supervisely.project.project.Project`, :class:`supervisely.project.project.Dataset`, or :class:`str`
+    :param dest_dir: Destination directory.
+    :type dest_dir: :class:`str`, optional
+    :param task_type: Task type.
+    :type task_type: :class:`str`, optional
+    :param meta: Project meta (required for Dataset conversion).
+    :type meta: :class:`supervisely.project.project_meta.ProjectMeta`, optional
+    :param log_progress: Show conversion progress bar.
+    :type log_progress: :class:`bool`
+    :param progress_cb: Function for tracking conversion progress (for all items in the project).
+    :type progress_cb: callable, optional
+    :return: None, list of YOLO lines, or path to the destination directory.
+    :rtype: NoneType, list, str
+
+    :Usage example:
+
+     .. code-block:: python
+
+        import supervisely as sly
+
+        # Local folder with Project
+        project_directory = "/home/admin/work/supervisely/source/project"
+        project_fs = sly.Project(project_directory, sly.OpenMode.READ)
+
+        # Convert Project to YOLO format
+        sly.convert.to_yolo(project_directory, dest_dir="./yolo")
+        # or
+        sly.convert.to_yolo(project_fs, dest_dir="./yolo")
+
+        # Convert Dataset to YOLO format
+        dataset: sly.Dataset = project_fs.datasets.get("dataset_name")
+        sly.convert.to_yolo(dataset, dest_dir="./yolo", meta=project_fs.meta)
+    """
+    if isinstance(input_data, str):
+        try:
+            input_data = Project(input_data, mode=OpenMode.READ)
+        except Exception:
+            try:
+                input_data = Dataset(input_data, mode=OpenMode.READ)
+            except Exception:
+                raise ValueError("Please check the path or the input data.")
+    if isinstance(input_data, Project):
+        return sly_project_to_yolo(
+            project=input_data,
+            dest_dir=dest_dir,
+            task_type=task_type,
+            log_progress=log_progress,
+            progress_cb=progress_cb,
+        )
+    elif isinstance(input_data, Dataset):
+        return sly_ds_to_yolo(
+            dataset=input_data,
+            meta=meta,
+            dest_dir=dest_dir,
+            task_type=task_type,
+            log_progress=log_progress,
+            progress_cb=progress_cb,
+        )
+    else:
+        raise ValueError("Unsupported input type. Only Project or Dataset are supported.")
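to_yolo follows the same dispatch pattern, with task_type choosing which label geometries are exported. A short sketch (the path is illustrative; each call writes an independent YOLO dataset):

    import supervisely as sly

    project_fs = sly.Project("./sly_project", sly.OpenMode.READ)

    # One export per supported task type.
    for task in ("detection", "segmentation", "pose"):
        sly.convert.to_yolo(project_fs, dest_dir=f"./yolo_{task}", task_type=task)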
supervisely/nn/artifacts/artifacts.py
CHANGED

@@ -3,11 +3,12 @@ import string
 from abc import abstractmethod
 from collections import defaultdict
 from concurrent.futures import ThreadPoolExecutor, as_completed
+from dataclasses import fields
 from datetime import datetime
 from json import JSONDecodeError
 from os.path import dirname, join
 from time import time
-from typing import Any, Dict, List, Literal, NamedTuple
+from typing import Any, Dict, List, Literal, NamedTuple, Union
 
 import requests

@@ -55,6 +56,9 @@ class BaseTrainArtifacts:
         self._metadata_file_name: str = "train_info.json"
 
         self._app_name: str = None
+        self._slug = None
+        self._serve_app_name = None
+        self._serve_slug = None
         self._framework_name: str = None
         self._framework_folder: str = None
         self._weights_folder: str = None

@@ -63,6 +67,7 @@ class BaseTrainArtifacts:
         self._config_file: str = None
         self._pattern: str = None
         self._available_task_types: List[str] = []
+        self._require_runtime = False
 
     @property
     def team_id(self) -> int:
@@ -94,6 +99,36 @@ class BaseTrainArtifacts:
         """
         return self._app_name
 
+    @property
+    def slug(self):
+        """
+        Train app slug.
+
+        :return: Train app slug.
+        :rtype: str
+        """
+        return self._slug
+
+    @property
+    def serve_app_name(self):
+        """
+        Serve application name.
+
+        :return: The serve application name.
+        :rtype: str
+        """
+        return self._serve_app_name
+
+    @property
+    def serve_slug(self):
+        """
+        Serve app slug.
+
+        :return: Serve app slug.
+        :rtype: str
+        """
+        return self._serve_slug
+
     @property
     def framework_name(self):
         """
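The three properties are plain read-only accessors over attributes that each framework subclass sets in __init__, as the framework files further down do. A hypothetical minimal subclass, for illustration only (the class name and slug values are invented; abstract members such as get_task_id are omitted):

    from supervisely.nn.artifacts.artifacts import BaseTrainArtifacts

    class MyFramework(BaseTrainArtifacts):
        def __init__(self, team_id: int):
            super().__init__(team_id)
            self._app_name = "Train MyFramework"        # hypothetical values
            self._slug = "my-org/my-framework/train"
            self._serve_app_name = "Serve MyFramework"
            self._serve_slug = "my-org/my-framework/serve"

    # MyFramework(team_id).serve_slug -> "my-org/my-framework/serve"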
@@ -164,6 +199,16 @@ class BaseTrainArtifacts:
         """
         return self._pattern
 
+    @property
+    def require_runtime(self):
+        """
+        Whether providing a runtime is required for the framework.
+
+        :return: True if a runtime is required, False otherwise.
+        :rtype: bool
+        """
+        return self._require_runtime
+
     def is_valid_artifacts_path(self, path):
         """
         Check if the provided path is valid and follows the specified session path pattern.
@@ -531,68 +576,68 @@ class BaseTrainArtifacts:
         logger.debug(f"Listing time: '{format(end_time - start_time, '.6f')}' sec")
         return train_infos
 
-    def
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    def convert_train_to_experiment_info(
+        self, train_info: TrainInfo
+    ) -> Union[ExperimentInfo, None]:
+        try:
+            checkpoints = []
+            for chk in train_info.checkpoints:
+                if self.weights_folder:
+                    checkpoints.append(join(self.weights_folder, chk.name))
+                else:
+                    checkpoints.append(chk.name)
+
+            best_checkpoint = next(
+                (chk.name for chk in train_info.checkpoints if "best" in chk.name), None
+            )
+            if not best_checkpoint and checkpoints:
+                best_checkpoint = get_file_name_with_ext(checkpoints[-1])
+
+            task_info = self._api.task.get_info_by_id(train_info.task_id)
+            workspace_id = task_info["workspaceId"]
+
+            project = self._api.project.get_info_by_name(workspace_id, train_info.project_name)
+            project_id = project.id if project else None
+
+            model_files = {}
+            if train_info.config_path:
+                model_files["config"] = self.get_config_path(train_info.artifacts_folder).replace(
+                    train_info.artifacts_folder, ""
                 )
-            if not best_checkpoint and checkpoints:
-                best_checkpoint = get_file_name_with_ext(checkpoints[-1])
-
-            task_info = api.task.get_info_by_id(train_info.task_id)
-            workspace_id = task_info["workspaceId"]
-
-            project = api.project.get_info_by_name(workspace_id, train_info.project_name)
-            project_id = project.id if project else None
-
-            model_files = {}
-            if train_info.config_path:
-                model_files["config"] = self.get_config_path(
-                    train_info.artifacts_folder
-                ).replace(train_info.artifacts_folder, "")
-
-            input_datetime = task_info["startedAt"]
-            parsed_datetime = datetime.strptime(input_datetime, "%Y-%m-%dT%H:%M:%S.%fZ")
-            date_time = parsed_datetime.strftime("%Y-%m-%d %H:%M:%S")
-
-            experiment_info_data = {
-                "experiment_name": f"Unknown {self.framework_name} experiment",
-                "framework_name": self.framework_name,
-                "model_name": f"Unknown {self.framework_name} model",
-                "task_type": train_info.task_type,
-                "project_id": project_id,
-                "task_id": train_info.task_id,
-                "model_files": model_files,
-                "checkpoints": checkpoints,
-                "best_checkpoint": best_checkpoint,
-                "artifacts_dir": train_info.artifacts_folder,
-                "datetime": date_time,
-            }
-
-            experiment_info_fields = {
-                field.name
-                for field in ExperimentInfo.__dataclass_fields__.values()  # pylint: disable=no-member
-            }
-            for field in experiment_info_fields:
-                if field not in experiment_info_data:
-                    experiment_info_data[field] = None
-
-            return ExperimentInfo(**experiment_info_data)
-        except Exception as e:
-            logger.debug(f"Failed to build experiment info: {e}")
-            return None
 
+            input_datetime = task_info["startedAt"]
+            parsed_datetime = datetime.strptime(input_datetime, "%Y-%m-%dT%H:%M:%S.%fZ")
+            date_time = parsed_datetime.strftime("%Y-%m-%d %H:%M:%S")
+
+            experiment_info_data = {
+                "experiment_name": f"Unknown {self.framework_name} experiment",
+                "framework_name": self.framework_name,
+                "model_name": f"Unknown {self.framework_name} model",
+                "task_type": train_info.task_type,
+                "project_id": project_id,
+                "task_id": train_info.task_id,
+                "model_files": model_files,
+                "checkpoints": checkpoints,
+                "best_checkpoint": best_checkpoint,
+                "artifacts_dir": train_info.artifacts_folder,
+                "datetime": date_time,
+            }
+
+            experiment_info_fields = {
+                field.name
+                for field in ExperimentInfo.__dataclass_fields__.values()  # pylint: disable=no-member
+            }
+            for field in experiment_info_fields:
+                if field not in experiment_info_data:
+                    experiment_info_data[field] = None
+            return ExperimentInfo(**experiment_info_data)
+        except Exception as e:
+            logger.debug(f"Failed to build experiment info: {e}")
+            return None
+
+    def get_list_experiment_info(
+        self, sort: Literal["desc", "asc"] = "desc"
+    ) -> List[ExperimentInfo]:
         train_infos = self.get_list(sort)
 
         # Sync version
@@ -607,7 +652,7 @@ class BaseTrainArtifacts:
         with ThreadPoolExecutor() as executor:
             experiment_infos = list(
                 executor.map(
-                    lambda t:
+                    lambda t: self.convert_train_to_experiment_info(t),
                     train_infos,
                 )
             )
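A sketch of listing experiments through the refactored method (assuming Supervisely credentials are available to the client and the team contains training artifacts; the team id is illustrative):

    from supervisely.nn.artifacts.rtdetr import RTDETR

    artifacts = RTDETR(team_id=42)  # illustrative team id
    for info in artifacts.get_list_experiment_info(sort="desc"):
        if info is not None:  # convert_train_to_experiment_info returns None on failure
            print(info.task_id, info.framework_name, info.best_checkpoint)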
@@ -621,3 +666,29 @@ class BaseTrainArtifacts:
         :rtype: List[str]
         """
         return self._available_task_types
+
+    def get_info_by_artifacts_dir(
+        self,
+        artifacts_dir: str,
+        return_type: Literal["train_info", "experiment_info"] = "train_info",
+    ) -> Union[TrainInfo, ExperimentInfo, None]:
+        """
+        Get training info by artifacts directory.
+
+        :param artifacts_dir: The artifacts directory.
+        :type artifacts_dir: str
+        :param return_type: The return type, either "train_info" or "experiment_info". Default is "train_info".
+        :type return_type: Literal["train_info", "experiment_info"]
+        :return: The training info.
+        :rtype: TrainInfo
+        """
+        for train_info in self.get_list():
+            if train_info.artifacts_folder == artifacts_dir:
+                if return_type == "train_info":
+                    return train_info
+                else:
+                    return self.convert_train_to_experiment_info(train_info)
+
+    # load_custom_checkpoint
+    # inference
+    # fix docstrings :param: x -> :param x:
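Usage sketch for the new lookup (the artifacts directory is illustrative; since the signature's default return_type is "train_info", the experiment form must be requested explicitly):

    from supervisely.nn.artifacts.detectron2 import Detectron2

    artifacts = Detectron2(team_id=42)  # illustrative team id
    info = artifacts.get_info_by_artifacts_dir(
        "/detectron2/12345_my_project",  # illustrative artifacts dir
        return_type="experiment_info",
    )
    if info is None:
        print("No training run found for that directory")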
supervisely/nn/artifacts/detectron2.py
CHANGED

@@ -10,6 +10,11 @@ class Detectron2(BaseTrainArtifacts):
         super().__init__(team_id)
 
         self._app_name = "Train Detectron2"
+        self._slug = "supervisely-ecosystem/detectron2/supervisely/train"
+        self._serve_app_name = "Serve Detectron2"
+        self._serve_slug = (
+            "supervisely-ecosystem/detectron2/supervisely/instance_segmentation/serve"
+        )
         self._framework_name = "Detectron2"
         self._framework_folder = "/detectron2"
         self._weights_folder = "checkpoints"

@@ -19,6 +24,7 @@ class Detectron2(BaseTrainArtifacts):
         self._config_file = "model_config.yaml"
         self._pattern = re_compile(r"^/detectron2/\d+_[^/]+/?$")
         self._available_task_types: List[str] = ["instance segmentation"]
+        self._require_runtime = False
 
     def get_task_id(self, artifacts_folder: str) -> str:
         parts = artifacts_folder.split("/")
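With the values above, the new metadata resolves as follows (a sketch; constructing the class assumes a reachable Supervisely instance and a valid team id):

    from supervisely.nn.artifacts.detectron2 import Detectron2

    d2 = Detectron2(team_id=42)  # illustrative team id
    print(d2.slug)             # supervisely-ecosystem/detectron2/supervisely/train
    print(d2.serve_app_name)   # Serve Detectron2
    print(d2.serve_slug)       # supervisely-ecosystem/detectron2/supervisely/instance_segmentation/serve
    print(d2.require_runtime)  # False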
supervisely/nn/artifacts/hrda.py
CHANGED

@@ -10,12 +10,16 @@ class HRDA(BaseTrainArtifacts):
         raise NotImplementedError
         # super().__init__(team_id)
         # self._app_name = "Train HRDA"
+        # self._serve_app_name = None
+        # self._slug = None
+        # self._serve_slug = None
         # self._framework_folder = "/HRDA"
         # self._weights_folder = None
         # self._task_type = "semantic segmentation"
         # self._weights_ext = ".pth"
         # self._config_file = "config.py"
         # self._available_task_types: List[str] = ["semantic segmentation"]
+        # self._require_runtime = False
 
     def get_task_id(self, artifacts_folder: str) -> str:
         raise NotImplementedError
supervisely/nn/artifacts/mmclassification.py
CHANGED

@@ -10,6 +10,9 @@ class MMClassification(BaseTrainArtifacts):
         super().__init__(team_id)
 
         self._app_name = "Train MMClassification"
+        self._slug = "supervisely-ecosystem/mmclassification/supervisely/train"
+        self._serve_app_name = "Serve MMClassification"
+        self._serve_slug = "supervisely-ecosystem/mmclassification/supervisely/serve"
         self._framework_name = "MMClassification"
         self._framework_folder = "/mmclassification"
         self._weights_folder = "checkpoints"

@@ -17,6 +20,7 @@ class MMClassification(BaseTrainArtifacts):
         self._weights_ext = ".pth"
         self._pattern = re_compile(r"^/mmclassification/\d+_[^/]+/?$")
         self._available_task_types: List[str] = ["classification"]
+        self._require_runtime = False
 
     def get_task_id(self, artifacts_folder: str) -> str:
         parts = artifacts_folder.split("/")
supervisely/nn/artifacts/mmdetection.py
CHANGED

@@ -13,6 +13,9 @@ class MMDetection(BaseTrainArtifacts):
         super().__init__(team_id)
 
         self._app_name = "Train MMDetection"
+        self._slug = "supervisely-ecosystem/mmdetection/train"
+        self._serve_app_name = "Serve MMDetection"
+        self._serve_slug = "supervisely-ecosystem/mmdetection/serve"
         self._framework_name = "MMDetection"
         self._framework_folder = "/mmdetection"
         self._weights_folder = "checkpoints/data"

@@ -22,6 +25,7 @@ class MMDetection(BaseTrainArtifacts):
         self._config_file = "config.py"
         self._pattern = re_compile(r"^/mmdetection/\d+_[^/]+/?$")
         self._available_task_types: List[str] = ["object detection", "instance segmentation"]
+        self._require_runtime = False
 
     def get_task_id(self, artifacts_folder: str) -> str:
         parts = artifacts_folder.split("/")
@@ -59,6 +63,9 @@ class MMDetection3(BaseTrainArtifacts):
         super().__init__(team_id)
 
         self._app_name = "Train MMDetection 3.0"
+        self._slug = "Serve MMDetection 3.0"
+        self._serve_app_name = "supervisely-ecosystem/train-mmdetection-v3"
+        self._serve_slug = "supervisely-ecosystem/serve-mmdetection-v3"
         self._framework_name = "MMDetection 3.0"
         self._framework_folder = "/mmdetection-3"
         self._weights_folder = None

@@ -67,7 +74,8 @@ class MMDetection3(BaseTrainArtifacts):
         self._config_file = "config.py"
         self._pattern = re_compile(r"^/mmdetection-3/\d+_[^/]+/?$")
         self._available_task_types: List[str] = ["object detection", "instance segmentation"]
-
+        self._require_runtime = False
+
     def get_task_id(self, artifacts_folder: str) -> str:
         parts = artifacts_folder.split("/")
         if len(parts) < 3:
supervisely/nn/artifacts/mmsegmentation.py
CHANGED

@@ -10,6 +10,9 @@ class MMSegmentation(BaseTrainArtifacts):
         super().__init__(team_id)
 
         self._app_name = "Train MMSegmentation"
+        self._slug = "supervisely-ecosystem/mmsegmentation/train"
+        self._serve_app_name = "Serve MMSegmentation"
+        self._serve_slug = "supervisely-ecosystem/mmsegmentation/serve"
         self._framework_name = "MMSegmentation"
         self._framework_folder = "/mmsegmentation"
         self._weights_folder = "checkpoints/data"

@@ -18,6 +21,7 @@ class MMSegmentation(BaseTrainArtifacts):
         self._config_file = "config.py"
         self._pattern = re_compile(r"^/mmsegmentation/\d+_[^/]+/?$")
         self._available_task_types: List[str] = ["instance segmentation"]
+        self._require_runtime = False
 
     def get_task_id(self, artifacts_folder: str) -> str:
         return artifacts_folder.split("/")[2].split("_")[0]
supervisely/nn/artifacts/ritm.py
CHANGED

@@ -10,6 +10,9 @@ class RITM(BaseTrainArtifacts):
         super().__init__(team_id)
 
         self._app_name = "Train RITM"
+        self._slug = "supervisely-ecosystem/ritm-training/supervisely/train"
+        self._serve_app_name = None
+        self._serve_slug = None
         self._framework_name = "RITM"
         self._framework_folder = "/RITM_training"
         self._weights_folder = "checkpoints"

@@ -18,6 +21,7 @@ class RITM(BaseTrainArtifacts):
         self._weights_ext = ".pth"
         self._pattern = re_compile(r"^/RITM_training/\d+_[^/]+/?$")
         self._available_task_types: List[str] = ["interactive segmentation"]
+        self._require_runtime = False
 
     def get_task_id(self, artifacts_folder: str) -> str:
         parts = artifacts_folder.split("/")
supervisely/nn/artifacts/rtdetr.py
CHANGED

@@ -10,6 +10,9 @@ class RTDETR(BaseTrainArtifacts):
         super().__init__(team_id)
 
         self._app_name = "Train RT-DETR"
+        self._slug = "supervisely-ecosystem/rt-detr/supervisely_integration/train"
+        self._serve_app_name = "Serve RT-DETR"
+        self._serve_slug = "supervisely-ecosystem/rt-detr/supervisely_integration/serve"
         self._framework_name = "RT-DETR"
         self._framework_folder = "/RT-DETR"
         self._weights_folder = "weights"

@@ -18,6 +21,7 @@ class RTDETR(BaseTrainArtifacts):
         self._config_file = "config.yml"
         self._pattern = re_compile(r"^/RT-DETR/[^/]+/\d+/?$")
         self._available_task_types: List[str] = ["object detection"]
+        self._require_runtime = False
 
     def get_task_id(self, artifacts_folder: str) -> str:
         return artifacts_folder.split("/")[-1]
supervisely/nn/artifacts/unet.py
CHANGED

@@ -10,6 +10,9 @@ class UNet(BaseTrainArtifacts):
         super().__init__(team_id)
 
         self._app_name = "Train UNet"
+        self._slug = "supervisely-ecosystem/unet/supervisely/train"
+        self._serve_app_name = "Serve UNet"
+        self._serve_slug = "supervisely-ecosystem/unet/supervisely/serve"
         self._framework_name = "UNet"
         self._framework_folder = "/unet"
         self._weights_folder = "checkpoints"

@@ -18,6 +21,7 @@ class UNet(BaseTrainArtifacts):
         self._config_file = "train_args.json"
         self._pattern = re_compile(r"^/unet/\d+_[^/]+/?$")
         self._available_task_types: List[str] = ["semantic segmentation"]
+        self._require_runtime = False
 
     def get_task_id(self, artifacts_folder: str) -> str:
         parts = artifacts_folder.split("/")
|