ultralytics 8.1.29__py3-none-any.whl → 8.3.63__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (247)
  1. tests/__init__.py +22 -0
  2. tests/conftest.py +83 -0
  3. tests/test_cli.py +122 -0
  4. tests/test_cuda.py +155 -0
  5. tests/test_engine.py +131 -0
  6. tests/test_exports.py +216 -0
  7. tests/test_integrations.py +150 -0
  8. tests/test_python.py +615 -0
  9. tests/test_solutions.py +94 -0
  10. ultralytics/__init__.py +11 -8
  11. ultralytics/cfg/__init__.py +569 -131
  12. ultralytics/cfg/datasets/Argoverse.yaml +2 -1
  13. ultralytics/cfg/datasets/DOTAv1.5.yaml +3 -2
  14. ultralytics/cfg/datasets/DOTAv1.yaml +3 -2
  15. ultralytics/cfg/datasets/GlobalWheat2020.yaml +3 -2
  16. ultralytics/cfg/datasets/ImageNet.yaml +2 -1
  17. ultralytics/cfg/datasets/Objects365.yaml +5 -4
  18. ultralytics/cfg/datasets/SKU-110K.yaml +2 -1
  19. ultralytics/cfg/datasets/VOC.yaml +3 -2
  20. ultralytics/cfg/datasets/VisDrone.yaml +6 -5
  21. ultralytics/cfg/datasets/african-wildlife.yaml +25 -0
  22. ultralytics/cfg/datasets/brain-tumor.yaml +23 -0
  23. ultralytics/cfg/datasets/carparts-seg.yaml +3 -2
  24. ultralytics/cfg/datasets/coco-pose.yaml +7 -6
  25. ultralytics/cfg/datasets/coco.yaml +3 -2
  26. ultralytics/cfg/datasets/coco128-seg.yaml +4 -3
  27. ultralytics/cfg/datasets/coco128.yaml +4 -3
  28. ultralytics/cfg/datasets/coco8-pose.yaml +3 -2
  29. ultralytics/cfg/datasets/coco8-seg.yaml +3 -2
  30. ultralytics/cfg/datasets/coco8.yaml +3 -2
  31. ultralytics/cfg/datasets/crack-seg.yaml +3 -2
  32. ultralytics/cfg/datasets/dog-pose.yaml +24 -0
  33. ultralytics/cfg/datasets/dota8.yaml +3 -2
  34. ultralytics/cfg/datasets/hand-keypoints.yaml +26 -0
  35. ultralytics/cfg/datasets/lvis.yaml +1236 -0
  36. ultralytics/cfg/datasets/medical-pills.yaml +22 -0
  37. ultralytics/cfg/datasets/open-images-v7.yaml +2 -1
  38. ultralytics/cfg/datasets/package-seg.yaml +5 -4
  39. ultralytics/cfg/datasets/signature.yaml +21 -0
  40. ultralytics/cfg/datasets/tiger-pose.yaml +3 -2
  41. ultralytics/cfg/datasets/xView.yaml +2 -1
  42. ultralytics/cfg/default.yaml +14 -11
  43. ultralytics/cfg/models/11/yolo11-cls-resnet18.yaml +24 -0
  44. ultralytics/cfg/models/11/yolo11-cls.yaml +33 -0
  45. ultralytics/cfg/models/11/yolo11-obb.yaml +50 -0
  46. ultralytics/cfg/models/11/yolo11-pose.yaml +51 -0
  47. ultralytics/cfg/models/11/yolo11-seg.yaml +50 -0
  48. ultralytics/cfg/models/11/yolo11.yaml +50 -0
  49. ultralytics/cfg/models/rt-detr/rtdetr-l.yaml +5 -2
  50. ultralytics/cfg/models/rt-detr/rtdetr-resnet101.yaml +5 -2
  51. ultralytics/cfg/models/rt-detr/rtdetr-resnet50.yaml +5 -2
  52. ultralytics/cfg/models/rt-detr/rtdetr-x.yaml +5 -2
  53. ultralytics/cfg/models/v10/yolov10b.yaml +45 -0
  54. ultralytics/cfg/models/v10/yolov10l.yaml +45 -0
  55. ultralytics/cfg/models/v10/yolov10m.yaml +45 -0
  56. ultralytics/cfg/models/v10/yolov10n.yaml +45 -0
  57. ultralytics/cfg/models/v10/yolov10s.yaml +45 -0
  58. ultralytics/cfg/models/v10/yolov10x.yaml +45 -0
  59. ultralytics/cfg/models/v3/yolov3-spp.yaml +5 -2
  60. ultralytics/cfg/models/v3/yolov3-tiny.yaml +5 -2
  61. ultralytics/cfg/models/v3/yolov3.yaml +5 -2
  62. ultralytics/cfg/models/v5/yolov5-p6.yaml +5 -2
  63. ultralytics/cfg/models/v5/yolov5.yaml +5 -2
  64. ultralytics/cfg/models/v6/yolov6.yaml +5 -2
  65. ultralytics/cfg/models/v8/yolov8-cls-resnet101.yaml +5 -2
  66. ultralytics/cfg/models/v8/yolov8-cls-resnet50.yaml +5 -2
  67. ultralytics/cfg/models/v8/yolov8-cls.yaml +5 -2
  68. ultralytics/cfg/models/v8/yolov8-ghost-p2.yaml +6 -2
  69. ultralytics/cfg/models/v8/yolov8-ghost-p6.yaml +6 -2
  70. ultralytics/cfg/models/v8/yolov8-ghost.yaml +5 -2
  71. ultralytics/cfg/models/v8/yolov8-obb.yaml +5 -2
  72. ultralytics/cfg/models/v8/yolov8-p2.yaml +5 -2
  73. ultralytics/cfg/models/v8/yolov8-p6.yaml +10 -7
  74. ultralytics/cfg/models/v8/yolov8-pose-p6.yaml +5 -2
  75. ultralytics/cfg/models/v8/yolov8-pose.yaml +5 -2
  76. ultralytics/cfg/models/v8/yolov8-rtdetr.yaml +5 -2
  77. ultralytics/cfg/models/v8/yolov8-seg-p6.yaml +5 -2
  78. ultralytics/cfg/models/v8/yolov8-seg.yaml +5 -2
  79. ultralytics/cfg/models/v8/yolov8-world.yaml +5 -2
  80. ultralytics/cfg/models/v8/yolov8-worldv2.yaml +5 -2
  81. ultralytics/cfg/models/v8/yolov8.yaml +5 -2
  82. ultralytics/cfg/models/v9/yolov9c-seg.yaml +41 -0
  83. ultralytics/cfg/models/v9/yolov9c.yaml +30 -25
  84. ultralytics/cfg/models/v9/yolov9e-seg.yaml +64 -0
  85. ultralytics/cfg/models/v9/yolov9e.yaml +46 -42
  86. ultralytics/cfg/models/v9/yolov9m.yaml +41 -0
  87. ultralytics/cfg/models/v9/yolov9s.yaml +41 -0
  88. ultralytics/cfg/models/v9/yolov9t.yaml +41 -0
  89. ultralytics/cfg/solutions/default.yaml +24 -0
  90. ultralytics/cfg/trackers/botsort.yaml +8 -5
  91. ultralytics/cfg/trackers/bytetrack.yaml +8 -5
  92. ultralytics/data/__init__.py +14 -3
  93. ultralytics/data/annotator.py +37 -15
  94. ultralytics/data/augment.py +1783 -289
  95. ultralytics/data/base.py +62 -27
  96. ultralytics/data/build.py +37 -8
  97. ultralytics/data/converter.py +196 -36
  98. ultralytics/data/dataset.py +233 -94
  99. ultralytics/data/loaders.py +199 -96
  100. ultralytics/data/split_dota.py +39 -29
  101. ultralytics/data/utils.py +111 -41
  102. ultralytics/engine/__init__.py +1 -1
  103. ultralytics/engine/exporter.py +579 -244
  104. ultralytics/engine/model.py +604 -252
  105. ultralytics/engine/predictor.py +22 -11
  106. ultralytics/engine/results.py +1228 -218
  107. ultralytics/engine/trainer.py +191 -129
  108. ultralytics/engine/tuner.py +18 -18
  109. ultralytics/engine/validator.py +18 -15
  110. ultralytics/hub/__init__.py +31 -13
  111. ultralytics/hub/auth.py +11 -7
  112. ultralytics/hub/google/__init__.py +159 -0
  113. ultralytics/hub/session.py +128 -94
  114. ultralytics/hub/utils.py +20 -21
  115. ultralytics/models/__init__.py +4 -2
  116. ultralytics/models/fastsam/__init__.py +2 -3
  117. ultralytics/models/fastsam/model.py +26 -4
  118. ultralytics/models/fastsam/predict.py +127 -63
  119. ultralytics/models/fastsam/utils.py +1 -44
  120. ultralytics/models/fastsam/val.py +1 -1
  121. ultralytics/models/nas/__init__.py +1 -1
  122. ultralytics/models/nas/model.py +21 -10
  123. ultralytics/models/nas/predict.py +3 -6
  124. ultralytics/models/nas/val.py +4 -4
  125. ultralytics/models/rtdetr/__init__.py +1 -1
  126. ultralytics/models/rtdetr/model.py +1 -1
  127. ultralytics/models/rtdetr/predict.py +6 -8
  128. ultralytics/models/rtdetr/train.py +6 -2
  129. ultralytics/models/rtdetr/val.py +3 -3
  130. ultralytics/models/sam/__init__.py +3 -3
  131. ultralytics/models/sam/amg.py +29 -23
  132. ultralytics/models/sam/build.py +211 -13
  133. ultralytics/models/sam/model.py +91 -30
  134. ultralytics/models/sam/modules/__init__.py +1 -1
  135. ultralytics/models/sam/modules/blocks.py +1129 -0
  136. ultralytics/models/sam/modules/decoders.py +381 -53
  137. ultralytics/models/sam/modules/encoders.py +515 -324
  138. ultralytics/models/sam/modules/memory_attention.py +237 -0
  139. ultralytics/models/sam/modules/sam.py +969 -21
  140. ultralytics/models/sam/modules/tiny_encoder.py +425 -154
  141. ultralytics/models/sam/modules/transformer.py +159 -60
  142. ultralytics/models/sam/modules/utils.py +293 -0
  143. ultralytics/models/sam/predict.py +1263 -132
  144. ultralytics/models/utils/__init__.py +1 -1
  145. ultralytics/models/utils/loss.py +36 -24
  146. ultralytics/models/utils/ops.py +3 -7
  147. ultralytics/models/yolo/__init__.py +3 -3
  148. ultralytics/models/yolo/classify/__init__.py +1 -1
  149. ultralytics/models/yolo/classify/predict.py +7 -8
  150. ultralytics/models/yolo/classify/train.py +17 -22
  151. ultralytics/models/yolo/classify/val.py +8 -4
  152. ultralytics/models/yolo/detect/__init__.py +1 -1
  153. ultralytics/models/yolo/detect/predict.py +3 -5
  154. ultralytics/models/yolo/detect/train.py +11 -4
  155. ultralytics/models/yolo/detect/val.py +90 -52
  156. ultralytics/models/yolo/model.py +14 -9
  157. ultralytics/models/yolo/obb/__init__.py +1 -1
  158. ultralytics/models/yolo/obb/predict.py +2 -2
  159. ultralytics/models/yolo/obb/train.py +5 -3
  160. ultralytics/models/yolo/obb/val.py +41 -23
  161. ultralytics/models/yolo/pose/__init__.py +1 -1
  162. ultralytics/models/yolo/pose/predict.py +3 -5
  163. ultralytics/models/yolo/pose/train.py +2 -2
  164. ultralytics/models/yolo/pose/val.py +51 -17
  165. ultralytics/models/yolo/segment/__init__.py +1 -1
  166. ultralytics/models/yolo/segment/predict.py +3 -5
  167. ultralytics/models/yolo/segment/train.py +2 -2
  168. ultralytics/models/yolo/segment/val.py +60 -19
  169. ultralytics/models/yolo/world/__init__.py +5 -0
  170. ultralytics/models/yolo/world/train.py +92 -0
  171. ultralytics/models/yolo/world/train_world.py +109 -0
  172. ultralytics/nn/__init__.py +1 -1
  173. ultralytics/nn/autobackend.py +228 -93
  174. ultralytics/nn/modules/__init__.py +39 -14
  175. ultralytics/nn/modules/activation.py +21 -0
  176. ultralytics/nn/modules/block.py +526 -66
  177. ultralytics/nn/modules/conv.py +24 -7
  178. ultralytics/nn/modules/head.py +177 -34
  179. ultralytics/nn/modules/transformer.py +6 -5
  180. ultralytics/nn/modules/utils.py +1 -2
  181. ultralytics/nn/tasks.py +226 -82
  182. ultralytics/solutions/__init__.py +30 -1
  183. ultralytics/solutions/ai_gym.py +96 -143
  184. ultralytics/solutions/analytics.py +247 -0
  185. ultralytics/solutions/distance_calculation.py +78 -135
  186. ultralytics/solutions/heatmap.py +93 -247
  187. ultralytics/solutions/object_counter.py +184 -259
  188. ultralytics/solutions/parking_management.py +246 -0
  189. ultralytics/solutions/queue_management.py +112 -0
  190. ultralytics/solutions/region_counter.py +116 -0
  191. ultralytics/solutions/security_alarm.py +144 -0
  192. ultralytics/solutions/solutions.py +178 -0
  193. ultralytics/solutions/speed_estimation.py +86 -174
  194. ultralytics/solutions/streamlit_inference.py +190 -0
  195. ultralytics/solutions/trackzone.py +68 -0
  196. ultralytics/trackers/__init__.py +1 -1
  197. ultralytics/trackers/basetrack.py +32 -13
  198. ultralytics/trackers/bot_sort.py +61 -28
  199. ultralytics/trackers/byte_tracker.py +83 -51
  200. ultralytics/trackers/track.py +21 -6
  201. ultralytics/trackers/utils/__init__.py +1 -1
  202. ultralytics/trackers/utils/gmc.py +62 -48
  203. ultralytics/trackers/utils/kalman_filter.py +166 -35
  204. ultralytics/trackers/utils/matching.py +40 -21
  205. ultralytics/utils/__init__.py +511 -239
  206. ultralytics/utils/autobatch.py +40 -22
  207. ultralytics/utils/benchmarks.py +266 -85
  208. ultralytics/utils/callbacks/__init__.py +1 -1
  209. ultralytics/utils/callbacks/base.py +1 -3
  210. ultralytics/utils/callbacks/clearml.py +7 -6
  211. ultralytics/utils/callbacks/comet.py +39 -17
  212. ultralytics/utils/callbacks/dvc.py +1 -1
  213. ultralytics/utils/callbacks/hub.py +16 -16
  214. ultralytics/utils/callbacks/mlflow.py +28 -24
  215. ultralytics/utils/callbacks/neptune.py +6 -2
  216. ultralytics/utils/callbacks/raytune.py +3 -4
  217. ultralytics/utils/callbacks/tensorboard.py +18 -18
  218. ultralytics/utils/callbacks/wb.py +27 -20
  219. ultralytics/utils/checks.py +172 -100
  220. ultralytics/utils/dist.py +2 -1
  221. ultralytics/utils/downloads.py +40 -34
  222. ultralytics/utils/errors.py +1 -1
  223. ultralytics/utils/files.py +72 -38
  224. ultralytics/utils/instance.py +41 -19
  225. ultralytics/utils/loss.py +83 -55
  226. ultralytics/utils/metrics.py +61 -56
  227. ultralytics/utils/ops.py +94 -89
  228. ultralytics/utils/patches.py +30 -14
  229. ultralytics/utils/plotting.py +600 -269
  230. ultralytics/utils/tal.py +67 -26
  231. ultralytics/utils/torch_utils.py +305 -112
  232. ultralytics/utils/triton.py +2 -1
  233. ultralytics/utils/tuner.py +21 -12
  234. ultralytics-8.3.63.dist-info/METADATA +370 -0
  235. ultralytics-8.3.63.dist-info/RECORD +241 -0
  236. {ultralytics-8.1.29.dist-info → ultralytics-8.3.63.dist-info}/WHEEL +1 -1
  237. ultralytics/data/explorer/__init__.py +0 -5
  238. ultralytics/data/explorer/explorer.py +0 -472
  239. ultralytics/data/explorer/gui/__init__.py +0 -1
  240. ultralytics/data/explorer/gui/dash.py +0 -268
  241. ultralytics/data/explorer/utils.py +0 -166
  242. ultralytics/models/fastsam/prompt.py +0 -357
  243. ultralytics-8.1.29.dist-info/METADATA +0 -373
  244. ultralytics-8.1.29.dist-info/RECORD +0 -197
  245. {ultralytics-8.1.29.dist-info → ultralytics-8.3.63.dist-info}/LICENSE +0 -0
  246. {ultralytics-8.1.29.dist-info → ultralytics-8.3.63.dist-info}/entry_points.txt +0 -0
  247. {ultralytics-8.1.29.dist-info → ultralytics-8.3.63.dist-info}/top_level.txt +0 -0
ultralytics/solutions/__init__.py
@@ -1 +1,30 @@
- # Ultralytics YOLO 🚀, AGPL-3.0 license
+ # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+ from .ai_gym import AIGym
+ from .analytics import Analytics
+ from .distance_calculation import DistanceCalculation
+ from .heatmap import Heatmap
+ from .object_counter import ObjectCounter
+ from .parking_management import ParkingManagement, ParkingPtsSelection
+ from .queue_management import QueueManager
+ from .region_counter import RegionCounter
+ from .security_alarm import SecurityAlarm
+ from .speed_estimation import SpeedEstimator
+ from .streamlit_inference import Inference
+ from .trackzone import TrackZone
+
+ __all__ = (
+     "AIGym",
+     "DistanceCalculation",
+     "Heatmap",
+     "ObjectCounter",
+     "ParkingManagement",
+     "ParkingPtsSelection",
+     "QueueManager",
+     "SpeedEstimator",
+     "Analytics",
+     "Inference",
+     "RegionCounter",
+     "TrackZone",
+     "SecurityAlarm",
+ )
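The expanded __all__ above exposes the new solution classes directly from ultralytics.solutions. A minimal import sketch (constructor keywords follow the conventions shown in the hunks below; the model names are the defaults used there):

from ultralytics.solutions import AIGym, Analytics

gym = AIGym(model="yolo11n-pose.pt")       # pose-based workout monitoring (see the ai_gym.py hunk below)
charts = Analytics(analytics_type="line")  # frame-by-frame detection counts as a line chart (see analytics.py below)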
ultralytics/solutions/ai_gym.py
@@ -1,158 +1,111 @@
- # Ultralytics YOLO 🚀, AGPL-3.0 license
+ # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
 
- import cv2
-
- from ultralytics.utils.checks import check_imshow
+ from ultralytics.solutions.solutions import BaseSolution
  from ultralytics.utils.plotting import Annotator
 
 
- class AIGym:
-     """A class to manage the gym steps of people in a real-time video stream based on their poses."""
-
-     def __init__(self):
-         """Initializes the AIGym with default values for Visual and Image parameters."""
-
-         # Image and line thickness
-         self.im0 = None
-         self.tf = None
-
-         # Keypoints and count information
-         self.keypoints = None
-         self.poseup_angle = None
-         self.posedown_angle = None
-         self.threshold = 0.001
-
-         # Store stage, count and angle information
-         self.angle = None
-         self.count = None
-         self.stage = None
-         self.pose_type = "pushup"
-         self.kpts_to_check = None
-
-         # Visual Information
-         self.view_img = False
-         self.annotator = None
-
-         # Check if environment support imshow
-         self.env_check = check_imshow(warn=True)
-
-     def set_args(
-         self,
-         kpts_to_check,
-         line_thickness=2,
-         view_img=False,
-         pose_up_angle=145.0,
-         pose_down_angle=90.0,
-         pose_type="pullup",
-     ):
+ class AIGym(BaseSolution):
+     """
+     A class to manage gym steps of people in a real-time video stream based on their poses.
+
+     This class extends BaseSolution to monitor workouts using YOLO pose estimation models. It tracks and counts
+     repetitions of exercises based on predefined angle thresholds for up and down positions.
+
+     Attributes:
+         count (List[int]): Repetition counts for each detected person.
+         angle (List[float]): Current angle of the tracked body part for each person.
+         stage (List[str]): Current exercise stage ('up', 'down', or '-') for each person.
+         initial_stage (str | None): Initial stage of the exercise.
+         up_angle (float): Angle threshold for considering the 'up' position of an exercise.
+         down_angle (float): Angle threshold for considering the 'down' position of an exercise.
+         kpts (List[int]): Indices of keypoints used for angle calculation.
+         annotator (Annotator): Object for drawing annotations on the image.
+
+     Methods:
+         monitor: Processes a frame to detect poses, calculate angles, and count repetitions.
+
+     Examples:
+         >>> gym = AIGym(model="yolov8n-pose.pt")
+         >>> image = cv2.imread("gym_scene.jpg")
+         >>> processed_image = gym.monitor(image)
+         >>> cv2.imshow("Processed Image", processed_image)
+         >>> cv2.waitKey(0)
+     """
+
+     def __init__(self, **kwargs):
+         """Initializes AIGym for workout monitoring using pose estimation and predefined angles."""
+         # Check if the model name ends with '-pose'
+         if "model" in kwargs and "-pose" not in kwargs["model"]:
+             kwargs["model"] = "yolo11n-pose.pt"
+         elif "model" not in kwargs:
+             kwargs["model"] = "yolo11n-pose.pt"
+
+         super().__init__(**kwargs)
+         self.count = []  # List for counts, necessary where there are multiple objects in frame
+         self.angle = []  # List for angle, necessary where there are multiple objects in frame
+         self.stage = []  # List for stage, necessary where there are multiple objects in frame
+
+         # Extract details from CFG single time for usage later
+         self.initial_stage = None
+         self.up_angle = float(self.CFG["up_angle"])  # Pose up predefined angle to consider up pose
+         self.down_angle = float(self.CFG["down_angle"])  # Pose down predefined angle to consider down pose
+         self.kpts = self.CFG["kpts"]  # User selected kpts of workouts storage for further usage
+
+     def monitor(self, im0):
          """
-         Configures the AIGym line_thickness, save image and view image parameters.
+         Monitors workouts using Ultralytics YOLO Pose Model.
 
-         Args:
-             kpts_to_check (list): 3 keypoints for counting
-             line_thickness (int): Line thickness for bounding boxes.
-             view_img (bool): display the im0
-             pose_up_angle (float): Angle to set pose position up
-             pose_down_angle (float): Angle to set pose position down
-             pose_type (str): "pushup", "pullup" or "abworkout"
-         """
-         self.kpts_to_check = kpts_to_check
-         self.tf = line_thickness
-         self.view_img = view_img
-         self.poseup_angle = pose_up_angle
-         self.posedown_angle = pose_down_angle
-         self.pose_type = pose_type
-
-     def start_counting(self, im0, results, frame_count):
-         """
-         Function used to count the gym steps.
+         This function processes an input image to track and analyze human poses for workout monitoring. It uses
+         the YOLO Pose model to detect keypoints, estimate angles, and count repetitions based on predefined
+         angle thresholds.
 
          Args:
-             im0 (ndarray): Current frame from the video stream.
-             results (list): Pose estimation data
-             frame_count (int): store current frame count
-         """
-         self.im0 = im0
-         if frame_count == 1:
-             self.count = [0] * len(results[0])
-             self.angle = [0] * len(results[0])
-             self.stage = ["-" for _ in results[0]]
-         self.keypoints = results[0].keypoints.data
-         self.annotator = Annotator(im0, line_width=2)
-
-         num_keypoints = len(results[0])
-
-         # Resize self.angle, self.count, and self.stage if the number of keypoints has changed
-         if len(self.angle) != num_keypoints:
-             self.angle = [0] * num_keypoints
-             self.count = [0] * num_keypoints
-             self.stage = ["-" for _ in range(num_keypoints)]
-
-         for ind, k in enumerate(reversed(self.keypoints)):
-             if self.pose_type in ["pushup", "pullup"]:
-                 self.angle[ind] = self.annotator.estimate_pose_angle(
-                     k[int(self.kpts_to_check[0])].cpu(),
-                     k[int(self.kpts_to_check[1])].cpu(),
-                     k[int(self.kpts_to_check[2])].cpu(),
-                 )
-                 self.im0 = self.annotator.draw_specific_points(k, self.kpts_to_check, shape=(640, 640), radius=10)
+             im0 (ndarray): Input image for processing.
 
-             if self.pose_type == "abworkout":
-                 self.angle[ind] = self.annotator.estimate_pose_angle(
-                     k[int(self.kpts_to_check[0])].cpu(),
-                     k[int(self.kpts_to_check[1])].cpu(),
-                     k[int(self.kpts_to_check[2])].cpu(),
-                 )
-                 self.im0 = self.annotator.draw_specific_points(k, self.kpts_to_check, shape=(640, 640), radius=10)
-                 if self.angle[ind] > self.poseup_angle:
-                     self.stage[ind] = "down"
-                 if self.angle[ind] < self.posedown_angle and self.stage[ind] == "down":
-                     self.stage[ind] = "up"
-                     self.count[ind] += 1
-                 self.annotator.plot_angle_and_count_and_stage(
-                     angle_text=self.angle[ind],
-                     count_text=self.count[ind],
-                     stage_text=self.stage[ind],
-                     center_kpt=k[int(self.kpts_to_check[1])],
-                     line_thickness=self.tf,
-                 )
+         Returns:
+             (ndarray): Processed image with annotations for workout monitoring.
 
-             if self.pose_type == "pushup":
-                 if self.angle[ind] > self.poseup_angle:
-                     self.stage[ind] = "up"
-                 if self.angle[ind] < self.posedown_angle and self.stage[ind] == "up":
-                     self.stage[ind] = "down"
-                     self.count[ind] += 1
-                 self.annotator.plot_angle_and_count_and_stage(
-                     angle_text=self.angle[ind],
-                     count_text=self.count[ind],
-                     stage_text=self.stage[ind],
-                     center_kpt=k[int(self.kpts_to_check[1])],
-                     line_thickness=self.tf,
-                 )
-             if self.pose_type == "pullup":
-                 if self.angle[ind] > self.poseup_angle:
+         Examples:
+             >>> gym = AIGym()
+             >>> image = cv2.imread("workout.jpg")
+             >>> processed_image = gym.monitor(image)
+         """
+         # Extract tracks
+         tracks = self.model.track(source=im0, persist=True, classes=self.CFG["classes"], **self.track_add_args)[0]
+
+         if tracks.boxes.id is not None:
+             # Extract and check keypoints
+             if len(tracks) > len(self.count):
+                 new_human = len(tracks) - len(self.count)
+                 self.angle += [0] * new_human
+                 self.count += [0] * new_human
+                 self.stage += ["-"] * new_human
+
+             # Initialize annotator
+             self.annotator = Annotator(im0, line_width=self.line_width)
+
+             # Enumerate over keypoints
+             for ind, k in enumerate(reversed(tracks.keypoints.data)):
+                 # Get keypoints and estimate the angle
+                 kpts = [k[int(self.kpts[i])].cpu() for i in range(3)]
+                 self.angle[ind] = self.annotator.estimate_pose_angle(*kpts)
+                 im0 = self.annotator.draw_specific_points(k, self.kpts, radius=self.line_width * 3)
+
+                 # Determine stage and count logic based on angle thresholds
+                 if self.angle[ind] < self.down_angle:
+                     if self.stage[ind] == "up":
+                         self.count[ind] += 1
                      self.stage[ind] = "down"
-                 if self.angle[ind] < self.posedown_angle and self.stage[ind] == "down":
+                 elif self.angle[ind] > self.up_angle:
                      self.stage[ind] = "up"
-                     self.count[ind] += 1
+
+                 # Display angle, count, and stage text
                  self.annotator.plot_angle_and_count_and_stage(
-                     angle_text=self.angle[ind],
-                     count_text=self.count[ind],
-                     stage_text=self.stage[ind],
-                     center_kpt=k[int(self.kpts_to_check[1])],
-                     line_thickness=self.tf,
+                     angle_text=self.angle[ind],  # angle text for display
+                     count_text=self.count[ind],  # count text for workouts
+                     stage_text=self.stage[ind],  # stage position text
+                     center_kpt=k[int(self.kpts[1])],  # center keypoint for display
                  )
 
-             self.annotator.kpts(k, shape=(640, 640), radius=1, kpt_line=True)
-
-         if self.env_check and self.view_img:
-             cv2.imshow("Ultralytics YOLOv8 AI GYM", self.im0)
-             if cv2.waitKey(1) & 0xFF == ord("q"):
-                 return
-
-         return self.im0
-
-
- if __name__ == "__main__":
-     AIGym()
+         self.display_output(im0)  # Display output image, if environment support display
+         return im0  # return an image for writing or further usage
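The hunk above replaces the old set_args()/start_counting() workflow with a single monitor() call on a BaseSolution subclass. A minimal migration sketch, assuming a local video file; the path and keypoint indices are illustrative, and the model/kpts keywords are the CFG-backed options read in __init__ above:

import cv2

from ultralytics.solutions import AIGym

gym = AIGym(model="yolo11n-pose.pt", kpts=[6, 8, 10])  # keypoint indices used for the angle estimate (illustrative)
cap = cv2.VideoCapture("workouts.mp4")  # illustrative input video
while cap.isOpened():
    ok, frame = cap.read()
    if not ok:
        break
    frame = gym.monitor(frame)  # tracks poses, updates per-person count/stage, returns the annotated frame
cap.release()
cv2.destroyAllWindows()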
ultralytics/solutions/analytics.py
@@ -0,0 +1,247 @@
+ # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+ from itertools import cycle
+
+ import cv2
+ import matplotlib.pyplot as plt
+ import numpy as np
+ from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
+ from matplotlib.figure import Figure
+
+ from ultralytics.solutions.solutions import BaseSolution  # Import a parent class
+
+
+ class Analytics(BaseSolution):
+     """
+     A class for creating and updating various types of charts for visual analytics.
+
+     This class extends BaseSolution to provide functionality for generating line, bar, pie, and area charts
+     based on object detection and tracking data.
+
+     Attributes:
+         type (str): The type of analytics chart to generate ('line', 'bar', 'pie', or 'area').
+         x_label (str): Label for the x-axis.
+         y_label (str): Label for the y-axis.
+         bg_color (str): Background color of the chart frame.
+         fg_color (str): Foreground color of the chart frame.
+         title (str): Title of the chart window.
+         max_points (int): Maximum number of data points to display on the chart.
+         fontsize (int): Font size for text display.
+         color_cycle (cycle): Cyclic iterator for chart colors.
+         total_counts (int): Total count of detected objects (used for line charts).
+         clswise_count (Dict[str, int]): Dictionary for class-wise object counts.
+         fig (Figure): Matplotlib figure object for the chart.
+         ax (Axes): Matplotlib axes object for the chart.
+         canvas (FigureCanvas): Canvas for rendering the chart.
+
+     Methods:
+         process_data: Processes image data and updates the chart.
+         update_graph: Updates the chart with new data points.
+
+     Examples:
+         >>> analytics = Analytics(analytics_type="line")
+         >>> frame = cv2.imread("image.jpg")
+         >>> processed_frame = analytics.process_data(frame, frame_number=1)
+         >>> cv2.imshow("Analytics", processed_frame)
+     """
+
+     def __init__(self, **kwargs):
+         """Initialize Analytics class with various chart types for visual data representation."""
+         super().__init__(**kwargs)
+
+         self.type = self.CFG["analytics_type"]  # extract type of analytics
+         self.x_label = "Classes" if self.type in {"bar", "pie"} else "Frame#"
+         self.y_label = "Total Counts"
+
+         # Predefined data
+         self.bg_color = "#F3F3F3"  # background color of frame
+         self.fg_color = "#111E68"  # foreground color of frame
+         self.title = "Ultralytics Solutions"  # window name
+         self.max_points = 45  # maximum points to be drawn on window
+         self.fontsize = 25  # text font size for display
+         figsize = (19.2, 10.8)  # Set output image size 1920 * 1080
+         self.color_cycle = cycle(["#DD00BA", "#042AFF", "#FF4447", "#7D24FF", "#BD00FF"])
+
+         self.total_counts = 0  # count variable for storing total counts i.e. for line
+         self.clswise_count = {}  # dictionary for class-wise counts
+
+         # Ensure line and area chart
+         if self.type in {"line", "area"}:
+             self.lines = {}
+             self.fig = Figure(facecolor=self.bg_color, figsize=figsize)
+             self.canvas = FigureCanvas(self.fig)  # Set common axis properties
+             self.ax = self.fig.add_subplot(111, facecolor=self.bg_color)
+             if self.type == "line":
+                 (self.line,) = self.ax.plot([], [], color="cyan", linewidth=self.line_width)
+         elif self.type in {"bar", "pie"}:
+             # Initialize bar or pie plot
+             self.fig, self.ax = plt.subplots(figsize=figsize, facecolor=self.bg_color)
+             self.canvas = FigureCanvas(self.fig)  # Set common axis properties
+             self.ax.set_facecolor(self.bg_color)
+             self.color_mapping = {}
+
+             if self.type == "pie":  # Ensure pie chart is circular
+                 self.ax.axis("equal")
+
+     def process_data(self, im0, frame_number):
+         """
+         Processes image data and runs object tracking to update analytics charts.
+
+         Args:
+             im0 (np.ndarray): Input image for processing.
+             frame_number (int): Video frame number for plotting the data.
+
+         Returns:
+             (np.ndarray): Processed image with updated analytics chart.
+
+         Raises:
+             ModuleNotFoundError: If an unsupported chart type is specified.
+
+         Examples:
+             >>> analytics = Analytics(analytics_type="line")
+             >>> frame = np.zeros((480, 640, 3), dtype=np.uint8)
+             >>> processed_frame = analytics.process_data(frame, frame_number=1)
+         """
+         self.extract_tracks(im0)  # Extract tracks
+
+         if self.type == "line":
+             for _ in self.boxes:
+                 self.total_counts += 1
+             im0 = self.update_graph(frame_number=frame_number)
+             self.total_counts = 0
+         elif self.type in {"pie", "bar", "area"}:
+             self.clswise_count = {}
+             for box, cls in zip(self.boxes, self.clss):
+                 if self.names[int(cls)] in self.clswise_count:
+                     self.clswise_count[self.names[int(cls)]] += 1
+                 else:
+                     self.clswise_count[self.names[int(cls)]] = 1
+             im0 = self.update_graph(frame_number=frame_number, count_dict=self.clswise_count, plot=self.type)
+         else:
+             raise ModuleNotFoundError(f"{self.type} chart is not supported ❌")
+         return im0
+
+     def update_graph(self, frame_number, count_dict=None, plot="line"):
+         """
+         Updates the graph with new data for single or multiple classes.
+
+         Args:
+             frame_number (int): The current frame number.
+             count_dict (Dict[str, int] | None): Dictionary with class names as keys and counts as values for multiple
+                 classes. If None, updates a single line graph.
+             plot (str): Type of the plot. Options are 'line', 'bar', 'pie', or 'area'.
+
+         Returns:
+             (np.ndarray): Updated image containing the graph.
+
+         Examples:
+             >>> analytics = Analytics()
+             >>> frame_number = 10
+             >>> count_dict = {"person": 5, "car": 3}
+             >>> updated_image = analytics.update_graph(frame_number, count_dict, plot="bar")
+         """
+         if count_dict is None:
+             # Single line update
+             x_data = np.append(self.line.get_xdata(), float(frame_number))
+             y_data = np.append(self.line.get_ydata(), float(self.total_counts))
+
+             if len(x_data) > self.max_points:
+                 x_data, y_data = x_data[-self.max_points :], y_data[-self.max_points :]
+
+             self.line.set_data(x_data, y_data)
+             self.line.set_label("Counts")
+             self.line.set_color("#7b0068")  # Pink color
+             self.line.set_marker("*")
+             self.line.set_markersize(self.line_width * 5)
+         else:
+             labels = list(count_dict.keys())
+             counts = list(count_dict.values())
+             if plot == "area":
+                 color_cycle = cycle(["#DD00BA", "#042AFF", "#FF4447", "#7D24FF", "#BD00FF"])
+                 # Multiple lines or area update
+                 x_data = self.ax.lines[0].get_xdata() if self.ax.lines else np.array([])
+                 y_data_dict = {key: np.array([]) for key in count_dict.keys()}
+                 if self.ax.lines:
+                     for line, key in zip(self.ax.lines, count_dict.keys()):
+                         y_data_dict[key] = line.get_ydata()
+
+                 x_data = np.append(x_data, float(frame_number))
+                 max_length = len(x_data)
+                 for key in count_dict.keys():
+                     y_data_dict[key] = np.append(y_data_dict[key], float(count_dict[key]))
+                     if len(y_data_dict[key]) < max_length:
+                         y_data_dict[key] = np.pad(y_data_dict[key], (0, max_length - len(y_data_dict[key])))
+                 if len(x_data) > self.max_points:
+                     x_data = x_data[1:]
+                     for key in count_dict.keys():
+                         y_data_dict[key] = y_data_dict[key][1:]
+
+                 self.ax.clear()
+                 for key, y_data in y_data_dict.items():
+                     color = next(color_cycle)
+                     self.ax.fill_between(x_data, y_data, color=color, alpha=0.7)
+                     self.ax.plot(
+                         x_data,
+                         y_data,
+                         color=color,
+                         linewidth=self.line_width,
+                         marker="o",
+                         markersize=self.line_width * 5,
+                         label=f"{key} Data Points",
+                     )
+             if plot == "bar":
+                 self.ax.clear()  # clear bar data
+                 for label in labels:  # Map labels to colors
+                     if label not in self.color_mapping:
+                         self.color_mapping[label] = next(self.color_cycle)
+                 colors = [self.color_mapping[label] for label in labels]
+                 bars = self.ax.bar(labels, counts, color=colors)
+                 for bar, count in zip(bars, counts):
+                     self.ax.text(
+                         bar.get_x() + bar.get_width() / 2,
+                         bar.get_height(),
+                         str(count),
+                         ha="center",
+                         va="bottom",
+                         color=self.fg_color,
+                     )
+                 # Create the legend using labels from the bars
+                 for bar, label in zip(bars, labels):
+                     bar.set_label(label)  # Assign label to each bar
+                 self.ax.legend(loc="upper left", fontsize=13, facecolor=self.fg_color, edgecolor=self.fg_color)
+             if plot == "pie":
+                 total = sum(counts)
+                 percentages = [size / total * 100 for size in counts]
+                 start_angle = 90
+                 self.ax.clear()
+
+                 # Create pie chart and create legend labels with percentages
+                 wedges, autotexts = self.ax.pie(
+                     counts, labels=labels, startangle=start_angle, textprops={"color": self.fg_color}, autopct=None
+                 )
+                 legend_labels = [f"{label} ({percentage:.1f}%)" for label, percentage in zip(labels, percentages)]
+
+                 # Assign the legend using the wedges and manually created labels
+                 self.ax.legend(wedges, legend_labels, title="Classes", loc="center left", bbox_to_anchor=(1, 0, 0.5, 1))
+                 self.fig.subplots_adjust(left=0.1, right=0.75)  # Adjust layout to fit the legend
+
+         # Common plot settings
+         self.ax.set_facecolor("#f0f0f0")  # Set to light gray or any other color you like
+         self.ax.set_title(self.title, color=self.fg_color, fontsize=self.fontsize)
+         self.ax.set_xlabel(self.x_label, color=self.fg_color, fontsize=self.fontsize - 3)
+         self.ax.set_ylabel(self.y_label, color=self.fg_color, fontsize=self.fontsize - 3)
+
+         # Add and format legend
+         legend = self.ax.legend(loc="upper left", fontsize=13, facecolor=self.bg_color, edgecolor=self.bg_color)
+         for text in legend.get_texts():
+             text.set_color(self.fg_color)
+
+         # Redraw graph, update view, capture, and display the updated plot
+         self.ax.relim()
+         self.ax.autoscale_view()
+         self.canvas.draw()
+         im0 = np.array(self.canvas.renderer.buffer_rgba())
+         im0 = cv2.cvtColor(im0[:, :, :3], cv2.COLOR_RGBA2BGR)
+         self.display_output(im0)
+
+         return im0  # Return the image
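As with AIGym, the new Analytics solution is driven one frame at a time. A minimal sketch based on the docstring examples above; the video path and model name are illustrative, and analytics_type selects 'line', 'bar', 'pie', or 'area':

import cv2

from ultralytics.solutions import Analytics

analytics = Analytics(analytics_type="line", model="yolo11n.pt")  # model keyword assumed from BaseSolution defaults
cap = cv2.VideoCapture("traffic.mp4")  # illustrative input video
frame_number = 0
while cap.isOpened():
    ok, frame = cap.read()
    if not ok:
        break
    frame_number += 1
    chart = analytics.process_data(frame, frame_number)  # returns the rendered chart as a BGR image
cap.release()
cv2.destroyAllWindows()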