supervisely-6.73.277-py3-none-any.whl → supervisely-6.73.278-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

@@ -1,8 +1,27 @@
- from typing import List, Tuple, Union
-
- from supervisely import AnyGeometry, GraphNodes, Polygon, Rectangle, logger
- from supervisely.geometry.graph import KeypointsTemplate, Node
+ import os
+ import shutil
+ from pathlib import Path
+ from typing import Callable, List, Literal, Optional, Tuple, Union
+
+ import yaml
+ from tqdm import tqdm
+
+ from supervisely._utils import generate_free_name
+ from supervisely.annotation.annotation import Annotation
+ from supervisely.annotation.label import Label
+ from supervisely.geometry.alpha_mask import AlphaMask
+ from supervisely.geometry.any_geometry import AnyGeometry
+ from supervisely.geometry.bitmap import Bitmap
+ from supervisely.geometry.graph import GraphNodes, KeypointsTemplate, Node
+ from supervisely.geometry.polygon import Polygon
+ from supervisely.geometry.polyline import Polyline
+ from supervisely.geometry.rectangle import Rectangle
  from supervisely.imaging.color import generate_rgb
+ from supervisely.io.fs import get_file_name_with_ext, touch
+ from supervisely.project.project import Dataset, OpenMode, Project
+ from supervisely.project.project_meta import ProjectMeta
+ from supervisely.sly_logger import logger
+ from supervisely.task.progress import tqdm_sly

  YOLO_DETECTION_COORDS_NUM = 4
  YOLO_SEGM_MIN_COORDS_NUM = 6
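
For orientation, the two constants above encode the expected number of coordinates per YOLO label line. The sample lines below are illustrative only (the numeric values are made up); they follow the format produced by the conversion helpers added later in this diff.

    # Detection line: "<class_idx> <x_center> <y_center> <width> <height>"  -> 4 coords (YOLO_DETECTION_COORDS_NUM)
    # e.g. "0 0.512345 0.433210 0.250000 0.180000"
    #
    # Segmentation line: "<class_idx> <x1> <y1> <x2> <y2> <x3> <y3> ..."    -> at least 6 coords (YOLO_SEGM_MIN_COORDS_NUM)
    # e.g. "2 0.100000 0.100000 0.900000 0.100000 0.500000 0.800000"
    #
    # All values are normalized to the image width/height and written with 6 decimal places.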
@@ -322,3 +341,319 @@ def get_geometry(
          num_keypoints=num_keypoints,
          num_dims=num_dims,
      )
+
+
+ def rectangle_to_yolo_line(
+     class_idx: int,
+     geometry: Rectangle,
+     img_height: int,
+     img_width: int,
+ ):
+     x = geometry.center.col / img_width
+     y = geometry.center.row / img_height
+     w = geometry.width / img_width
+     h = geometry.height / img_height
+     return f"{class_idx} {x:.6f} {y:.6f} {w:.6f} {h:.6f}"
+
+
+ def polygon_to_yolo_line(
+     class_idx: int,
+     geometry: Polygon,
+     img_height: int,
+     img_width: int,
+ ) -> str:
+     coords = []
+     for point in geometry.exterior:
+         x = point.col / img_width
+         y = point.row / img_height
+         coords.extend([x, y])
+     return f"{class_idx} {' '.join(map(lambda coord: f'{coord:.6f}', coords))}"
+
+
+ def keypoints_to_yolo_line(
+     class_idx: int,
+     geometry: GraphNodes,
+     img_height: int,
+     img_width: int,
+     max_kpts_count: int,
+ ):
+     bbox = geometry.to_bbox()
+     x, y, w, h = bbox.center.col, bbox.center.row, bbox.width, bbox.height
+     x, y, w, h = x / img_width, y / img_height, w / img_width, h / img_height
+
+     line = f"{class_idx} {x:.6f} {y:.6f} {w:.6f} {h:.6f}"
+
+     for node in geometry.nodes.values():
+         node: Node
+         visible = 2 if not node.disabled else 1
+         line += (
+             f" {node.location.col / img_width:.6f} {node.location.row / img_height:.6f} {visible}"
+         )
+     if len(geometry.nodes) < max_kpts_count:
+         for _ in range(max_kpts_count - len(geometry.nodes)):
+             line += " 0 0 0"
+
+     return line
+
+
+ def convert_label_geometry_if_needed(
+     label: Label,
+     task_type: Literal["detection", "segmentation", "pose"],
+     verbose: bool = False,
+ ) -> List[Label]:
+     if task_type == "detection":
+         available_geometry_type = Rectangle
+         convertable_geometry_types = [Polygon, GraphNodes, Bitmap, Polyline, AlphaMask, AnyGeometry]
+     elif task_type == "segmentation":
+         available_geometry_type = Polygon
+         convertable_geometry_types = [Bitmap, AlphaMask, AnyGeometry]
+     elif task_type == "pose":
+         available_geometry_type = GraphNodes
+         convertable_geometry_types = []
+     else:
+         raise ValueError(
+             f"Unsupported task type: {task_type}. "
+             "Supported types: 'detection', 'segmentation', 'pose'"
+         )
+
+     if label.obj_class.geometry_type == available_geometry_type:
+         return [label]
+
+     need_convert = label.obj_class.geometry_type in convertable_geometry_types
+
+     if need_convert:
+         new_obj_cls = label.obj_class.clone(geometry_type=available_geometry_type)
+         return label.convert(new_obj_cls)
+
+     if verbose:
+         logger.warning(
+             f"Label '{label.obj_class.name}' has unsupported geometry type: "
+             f"{type(label.obj_class.geometry_type)}. Skipping."
+         )
+     return []
+
+
+ def label_to_yolo_lines(
+     label: Label,
+     img_height: int,
+     img_width: int,
+     class_names: List[str],
+     task_type: Literal["detection", "segmentation", "pose"],
+ ) -> List[str]:
+     """
+     Convert the Supervisely Label to a line in the YOLO format.
+     """
+
+     labels = convert_label_geometry_if_needed(label, task_type)
+     class_idx = class_names.index(label.obj_class.name)
+
+     lines = []
+     for label in labels:
+         if task_type == "detection":
+             yolo_line = rectangle_to_yolo_line(
+                 class_idx=class_idx,
+                 geometry=label.geometry,
+                 img_height=img_height,
+                 img_width=img_width,
+             )
+         elif task_type == "segmentation":
+             yolo_line = polygon_to_yolo_line(
+                 class_idx=class_idx,
+                 geometry=label.geometry,
+                 img_height=img_height,
+                 img_width=img_width,
+             )
+         elif task_type == "pose":
+             nodes_field = label.obj_class.geometry_type.items_json_field
+             max_kpts_count = len(label.obj_class.geometry_config[nodes_field])
+             yolo_line = keypoints_to_yolo_line(
+                 class_idx=class_idx,
+                 geometry=label.geometry,
+                 img_height=img_height,
+                 img_width=img_width,
+                 max_kpts_count=max_kpts_count,
+             )
+         else:
+             raise ValueError(f"Unsupported geometry type: {type(label.obj_class.geometry_type)}")
+
+         if yolo_line is not None:
+             lines.append(yolo_line)
+
+     return lines
+
+
+ def sly_ann_to_yolo(
+     ann: Annotation,
+     class_names: List[str],
+     task_type: Literal["detection", "segmentation", "pose"] = "detection",
+ ) -> List[str]:
+     """
+     Convert the Supervisely annotation to the YOLO format.
+     """
+
+     h, w = ann.img_size
+     yolo_lines = []
+     for label in ann.labels:
+         lines = label_to_yolo_lines(
+             label=label,
+             img_height=h,
+             img_width=w,
+             class_names=class_names,
+             task_type=task_type,
+         )
+         yolo_lines.extend(lines)
+     return yolo_lines
+
+
+ def sly_ds_to_yolo(
+     dataset: Dataset,
+     meta: ProjectMeta,
+     dest_dir: Optional[str] = None,
+     task_type: Literal["detection", "segmentation", "pose"] = "detection",
+     log_progress: bool = False,
+     progress_cb: Optional[Union[tqdm, Callable]] = None,
+ ) -> str:
+
+     if progress_cb is not None:
+         log_progress = False
+
+     if log_progress:
+         progress_cb = tqdm_sly(
+             desc=f"Converting dataset '{dataset.short_name}' to YOLO format",
+             total=len(dataset),
+         ).update
+
+     dest_dir = Path(dataset.path) / "yolo" if dest_dir is None else Path(dest_dir)
+     dest_dir.mkdir(parents=True, exist_ok=True)
+
+     # * create train and val directories
+     images_dir = dest_dir / "images"
+     labels_dir = dest_dir / "labels"
+     train_images_dir = images_dir / "train"
+     train_labels_dir = labels_dir / "train"
+     val_images_dir = images_dir / "val"
+     val_labels_dir = labels_dir / "val"
+     for dir_path in [train_images_dir, train_labels_dir, val_images_dir, val_labels_dir]:
+         dir_path.mkdir(parents=True, exist_ok=True)
+
+     # * convert annotations and copy images
+     class_names = [obj_class.name for obj_class in meta.obj_classes]
+     used_names = set(os.listdir(train_images_dir)) | set(os.listdir(val_images_dir))
+     for name in dataset.get_items_names():
+         ann_path = dataset.get_ann_path(name)
+         ann = Annotation.load_json_file(ann_path, meta)
+
+         images_dir = val_images_dir if ann.img_tags.get("val") else train_images_dir
+         labels_dir = val_labels_dir if ann.img_tags.get("val") else train_labels_dir
+
+         img_path = Path(dataset.get_img_path(name))
+         img_name = f"{dataset.short_name}_{get_file_name_with_ext(img_path)}"
+         img_name = generate_free_name(used_names, img_name, with_ext=True, extend_used_names=True)
+         shutil.copy2(img_path, images_dir / img_name)
+
+         label_path = str(labels_dir / f"{img_name}.txt")
+         yolo_lines = ann.to_yolo(class_names, task_type)
+         if len(yolo_lines) > 0:
+             with open(label_path, "w") as f:
+                 f.write("\n".join(yolo_lines))
+         else:
+             touch(label_path)
+
+         if progress_cb is not None:
+             progress_cb(1)
+
+     # * save data config file if it does not exist
+     config_path = dest_dir / "data_config.yaml"
+     if not config_path.exists():
+         save_yolo_config(meta, dest_dir, with_keypoint=task_type == "pose")
+
+     return str(dest_dir)
+
+
+ def save_yolo_config(meta: ProjectMeta, dest_dir: str, with_keypoint: bool = False):
+     dest_dir = Path(dest_dir)
+     save_path = dest_dir / "data_config.yaml"
+     class_names = [c.name for c in meta.obj_classes]
+     class_colors = [c.color for c in meta.obj_classes]
+     data_yaml = {
+         "train": f"../{str(dest_dir.name)}/images/train",
+         "val": f"../{str(dest_dir.name)}/images/val",
+         "nc": len(class_names),
+         "names": class_names,
+         "colors": class_colors,
+     }
+     has_keypoints = any(c.geometry_type == GraphNodes for c in meta.obj_classes)
+     if has_keypoints and with_keypoint:
+         max_kpts_count = 0
+         for obj_class in meta.obj_classes:
+             if issubclass(obj_class.geometry_type, GraphNodes):
+                 field_name = obj_class.geometry_type.items_json_field
+                 max_kpts_count = max(max_kpts_count, len(obj_class.geometry_config[field_name]))
+         data_yaml["kpt_shape"] = [max_kpts_count, 3]
+     with open(save_path, "w") as f:
+         yaml.dump(data_yaml, f, default_flow_style=None)
+
+     logger.info(f"Data config file has been saved to {str(save_path)}")
+
+
+ def sly_project_to_yolo(
+     project: Union[Project, str],
+     dest_dir: Optional[str] = None,
+     task_type: Literal["detection", "segmentation", "pose"] = "detection",
+     log_progress: bool = False,
+     progress_cb: Optional[Callable] = None,
+ ):
+     """
+     Convert Supervisely project to YOLO format.
+
+     :param dest_dir: Destination directory.
+     :type dest_dir: :class:`str`, optional
+     :param log_progress: Show uploading progress bar.
+     :type log_progress: :class:`bool`
+     :param progress_cb: Function for tracking conversion progress (for all items in the project).
+     :type progress_cb: callable, optional
+     :return: None
+     :rtype: NoneType
+
+     :Usage example:
+
+      .. code-block:: python
+
+         import supervisely as sly
+
+         # Local folder with Project
+         project_directory = "/home/admin/work/supervisely/source/project"
+
+         # Convert Project to YOLO format
+         sly.Project(project_directory).to_yolo(log_progress=True)
+     """
+     if isinstance(project, str):
+         project = Project(project, mode=OpenMode.READ)
+
+     dest_dir = Path(project.directory).parent / "yolo" if dest_dir is None else Path(dest_dir)
+
+     dest_dir.mkdir(parents=True, exist_ok=True)
+     if len(os.listdir(dest_dir)) > 0:
+         raise FileExistsError(f"Directory {dest_dir} is not empty.")
+
+     if progress_cb is not None:
+         log_progress = False
+
+     if log_progress:
+         progress_cb = tqdm_sly(
+             desc="Converting Supervisely project to YOLO format", total=project.total_items
+         ).update
+
+     save_yolo_config(project.meta, dest_dir, with_keypoint=task_type == "pose")
+
+     for dataset in project.datasets:
+         dataset: Dataset
+         dataset.to_yolo(
+             meta=project.meta,
+             dest_dir=dest_dir,
+             task_type=task_type,
+             log_progress=log_progress,
+             progress_cb=progress_cb,
+         )
+         logger.info(f"Dataset '{dataset.short_name}' has been converted to YOLO format.")
+     logger.info(f"Project '{project.name}' has been converted to YOLO format.")
@@ -0,0 +1,8 @@
+ # Pointcloud
+ from supervisely.convert.pointcloud.sly.sly_pointcloud_converter import SLYPointcloudConverter
+ from supervisely.convert.pointcloud.las.las_converter import LasConverter
+ from supervisely.convert.pointcloud.ply.ply_converter import PlyConverter
+ from supervisely.convert.pointcloud.bag.bag_converter import BagConverter
+ from supervisely.convert.pointcloud.lyft.lyft_converter import LyftConverter
+ from supervisely.convert.pointcloud.nuscenes_conv.nuscenes_converter import NuscenesConverter
+ from supervisely.convert.pointcloud.kitti_3d.kitti_3d_converter import KITTI3DConverter
@@ -0,0 +1,9 @@
+ # Pointcloud Episodes
+ from supervisely.convert.pointcloud_episodes.sly.sly_pointcloud_episodes_converter import (
+     SLYPointcloudEpisodesConverter,
+ )
+ from supervisely.convert.pointcloud_episodes.bag.bag_converter import BagEpisodesConverter
+ from supervisely.convert.pointcloud_episodes.lyft.lyft_converter import LyftEpisodesConverter
+ from supervisely.convert.pointcloud_episodes.nuscenes_conv.nuscenes_converter import (
+     NuscenesEpisodesConverter,
+ )
@@ -0,0 +1,3 @@
+ # Video
+ from supervisely.convert.video.mot.mot_converter import MOTConverter
+ from supervisely.convert.video.sly.sly_video_converter import SLYVideoConverter
@@ -0,0 +1,3 @@
+ # Volume
+ from supervisely.convert.volume.sly.sly_volume_converter import SLYVolumeConverter
+ from supervisely.convert.volume.dicom.dicom_converter import DICOMConverter
@@ -102,7 +102,7 @@ class TrainingArtifacts:
          apps = api.app.get_list(
              team_id,
              filter=[{"field": "name", "operator": "=", "value": app_name}],
-             only_running=True,
+             only_running=False,
          )
          if len(apps) == 1:
              app_info = apps[0]
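
The only functional change in this hunk is `only_running=False`, so the artifacts lookup also matches app sessions that have already stopped. A minimal sketch of the call under this change, assuming a standard `sly.Api` instance; `team_id` and `app_name` are placeholders.

    import supervisely as sly

    api = sly.Api.from_env()
    team_id = 123              # placeholder team id
    app_name = "Train YOLO"    # placeholder app name

    # With only_running=False, finished sessions are returned as well,
    # so artifacts from a completed training app can still be resolved.
    apps = api.app.get_list(
        team_id,
        filter=[{"field": "name", "operator": "=", "value": app_name}],
        only_running=False,
    )
    app_info = apps[0] if len(apps) == 1 else None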
@@ -1674,8 +1674,9 @@ class TrainApp:
              self.gui.training_artifacts.model_benchmark_report_thumbnail.show()
              self.gui.training_artifacts.model_benchmark_report_field.show()
          else:
-             self.gui.training_artifacts.model_benchmark_fail_text.show()
-             self.gui.training_artifacts.model_benchmark_report_field.show()
+             if self.gui.hyperparameters_selector.get_model_benchmark_checkbox_value():
+                 self.gui.training_artifacts.model_benchmark_fail_text.show()
+                 self.gui.training_artifacts.model_benchmark_report_field.show()
          # ---------------------------- #

          # Set instruction to GUI