ultralytics 8.3.15__tar.gz → 8.3.16__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (244) hide show
  1. {ultralytics-8.3.15/ultralytics.egg-info → ultralytics-8.3.16}/PKG-INFO +11 -11
  2. {ultralytics-8.3.15 → ultralytics-8.3.16}/README.md +9 -9
  3. {ultralytics-8.3.15 → ultralytics-8.3.16}/pyproject.toml +1 -1
  4. {ultralytics-8.3.15 → ultralytics-8.3.16}/tests/test_solutions.py +20 -16
  5. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/__init__.py +1 -1
  6. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/solutions/default.yaml +1 -0
  7. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/data/split_dota.py +3 -3
  8. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/solutions/ai_gym.py +43 -9
  9. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/solutions/analytics.py +65 -12
  10. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/solutions/distance_calculation.py +50 -10
  11. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/solutions/heatmap.py +50 -14
  12. ultralytics-8.3.16/ultralytics/solutions/object_counter.py +187 -0
  13. ultralytics-8.3.16/ultralytics/solutions/parking_management.py +236 -0
  14. ultralytics-8.3.16/ultralytics/solutions/queue_management.py +109 -0
  15. ultralytics-8.3.16/ultralytics/solutions/solutions.py +150 -0
  16. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/solutions/speed_estimation.py +41 -7
  17. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/solutions/streamlit_inference.py +2 -3
  18. {ultralytics-8.3.15 → ultralytics-8.3.16/ultralytics.egg-info}/PKG-INFO +11 -11
  19. ultralytics-8.3.15/ultralytics/solutions/object_counter.py +0 -131
  20. ultralytics-8.3.15/ultralytics/solutions/parking_management.py +0 -241
  21. ultralytics-8.3.15/ultralytics/solutions/queue_management.py +0 -64
  22. ultralytics-8.3.15/ultralytics/solutions/solutions.py +0 -95
  23. {ultralytics-8.3.15 → ultralytics-8.3.16}/LICENSE +0 -0
  24. {ultralytics-8.3.15 → ultralytics-8.3.16}/setup.cfg +0 -0
  25. {ultralytics-8.3.15 → ultralytics-8.3.16}/tests/__init__.py +0 -0
  26. {ultralytics-8.3.15 → ultralytics-8.3.16}/tests/conftest.py +0 -0
  27. {ultralytics-8.3.15 → ultralytics-8.3.16}/tests/test_cli.py +0 -0
  28. {ultralytics-8.3.15 → ultralytics-8.3.16}/tests/test_cuda.py +0 -0
  29. {ultralytics-8.3.15 → ultralytics-8.3.16}/tests/test_engine.py +0 -0
  30. {ultralytics-8.3.15 → ultralytics-8.3.16}/tests/test_exports.py +0 -0
  31. {ultralytics-8.3.15 → ultralytics-8.3.16}/tests/test_integrations.py +0 -0
  32. {ultralytics-8.3.15 → ultralytics-8.3.16}/tests/test_python.py +0 -0
  33. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/assets/bus.jpg +0 -0
  34. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/assets/zidane.jpg +0 -0
  35. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/__init__.py +0 -0
  36. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/Argoverse.yaml +0 -0
  37. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/DOTAv1.5.yaml +0 -0
  38. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/DOTAv1.yaml +0 -0
  39. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/GlobalWheat2020.yaml +0 -0
  40. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/ImageNet.yaml +0 -0
  41. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/Objects365.yaml +0 -0
  42. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/SKU-110K.yaml +0 -0
  43. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/VOC.yaml +0 -0
  44. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/VisDrone.yaml +0 -0
  45. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/african-wildlife.yaml +0 -0
  46. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/brain-tumor.yaml +0 -0
  47. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/carparts-seg.yaml +0 -0
  48. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/coco-pose.yaml +0 -0
  49. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/coco.yaml +0 -0
  50. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/coco128-seg.yaml +0 -0
  51. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/coco128.yaml +0 -0
  52. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/coco8-pose.yaml +0 -0
  53. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/coco8-seg.yaml +0 -0
  54. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/coco8.yaml +0 -0
  55. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/crack-seg.yaml +0 -0
  56. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/dota8.yaml +0 -0
  57. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/hand-keypoints.yaml +0 -0
  58. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/lvis.yaml +0 -0
  59. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/open-images-v7.yaml +0 -0
  60. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/package-seg.yaml +0 -0
  61. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/signature.yaml +0 -0
  62. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/tiger-pose.yaml +0 -0
  63. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/datasets/xView.yaml +0 -0
  64. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/default.yaml +0 -0
  65. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/11/yolo11-cls.yaml +0 -0
  66. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/11/yolo11-obb.yaml +0 -0
  67. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/11/yolo11-pose.yaml +0 -0
  68. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/11/yolo11-seg.yaml +0 -0
  69. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/11/yolo11.yaml +0 -0
  70. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/rt-detr/rtdetr-l.yaml +0 -0
  71. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/rt-detr/rtdetr-resnet101.yaml +0 -0
  72. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/rt-detr/rtdetr-resnet50.yaml +0 -0
  73. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/rt-detr/rtdetr-x.yaml +0 -0
  74. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v10/yolov10b.yaml +0 -0
  75. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v10/yolov10l.yaml +0 -0
  76. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v10/yolov10m.yaml +0 -0
  77. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v10/yolov10n.yaml +0 -0
  78. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v10/yolov10s.yaml +0 -0
  79. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v10/yolov10x.yaml +0 -0
  80. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v3/yolov3-spp.yaml +0 -0
  81. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v3/yolov3-tiny.yaml +0 -0
  82. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v3/yolov3.yaml +0 -0
  83. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v5/yolov5-p6.yaml +0 -0
  84. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v5/yolov5.yaml +0 -0
  85. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v6/yolov6.yaml +0 -0
  86. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8-cls-resnet101.yaml +0 -0
  87. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8-cls-resnet50.yaml +0 -0
  88. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8-cls.yaml +0 -0
  89. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8-ghost-p2.yaml +0 -0
  90. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8-ghost-p6.yaml +0 -0
  91. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8-ghost.yaml +0 -0
  92. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8-obb.yaml +0 -0
  93. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8-p2.yaml +0 -0
  94. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8-p6.yaml +0 -0
  95. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8-pose-p6.yaml +0 -0
  96. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8-pose.yaml +0 -0
  97. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8-rtdetr.yaml +0 -0
  98. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8-seg-p6.yaml +0 -0
  99. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8-seg.yaml +0 -0
  100. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8-world.yaml +0 -0
  101. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8-worldv2.yaml +0 -0
  102. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v8/yolov8.yaml +0 -0
  103. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v9/yolov9c-seg.yaml +0 -0
  104. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v9/yolov9c.yaml +0 -0
  105. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v9/yolov9e-seg.yaml +0 -0
  106. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v9/yolov9e.yaml +0 -0
  107. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v9/yolov9m.yaml +0 -0
  108. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v9/yolov9s.yaml +0 -0
  109. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/models/v9/yolov9t.yaml +0 -0
  110. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/trackers/botsort.yaml +0 -0
  111. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/cfg/trackers/bytetrack.yaml +0 -0
  112. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/data/__init__.py +0 -0
  113. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/data/annotator.py +0 -0
  114. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/data/augment.py +0 -0
  115. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/data/base.py +0 -0
  116. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/data/build.py +0 -0
  117. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/data/converter.py +0 -0
  118. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/data/dataset.py +0 -0
  119. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/data/loaders.py +0 -0
  120. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/data/utils.py +0 -0
  121. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/engine/__init__.py +0 -0
  122. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/engine/exporter.py +0 -0
  123. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/engine/model.py +0 -0
  124. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/engine/predictor.py +0 -0
  125. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/engine/results.py +0 -0
  126. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/engine/trainer.py +0 -0
  127. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/engine/tuner.py +0 -0
  128. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/engine/validator.py +0 -0
  129. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/hub/__init__.py +0 -0
  130. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/hub/auth.py +0 -0
  131. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/hub/google/__init__.py +0 -0
  132. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/hub/session.py +0 -0
  133. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/hub/utils.py +0 -0
  134. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/__init__.py +0 -0
  135. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/fastsam/__init__.py +0 -0
  136. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/fastsam/model.py +0 -0
  137. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/fastsam/predict.py +0 -0
  138. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/fastsam/utils.py +0 -0
  139. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/fastsam/val.py +0 -0
  140. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/nas/__init__.py +0 -0
  141. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/nas/model.py +0 -0
  142. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/nas/predict.py +0 -0
  143. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/nas/val.py +0 -0
  144. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/rtdetr/__init__.py +0 -0
  145. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/rtdetr/model.py +0 -0
  146. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/rtdetr/predict.py +0 -0
  147. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/rtdetr/train.py +0 -0
  148. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/rtdetr/val.py +0 -0
  149. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/sam/__init__.py +0 -0
  150. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/sam/amg.py +0 -0
  151. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/sam/build.py +0 -0
  152. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/sam/model.py +0 -0
  153. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/sam/modules/__init__.py +0 -0
  154. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/sam/modules/blocks.py +0 -0
  155. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/sam/modules/decoders.py +0 -0
  156. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/sam/modules/encoders.py +0 -0
  157. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/sam/modules/memory_attention.py +0 -0
  158. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/sam/modules/sam.py +0 -0
  159. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/sam/modules/tiny_encoder.py +0 -0
  160. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/sam/modules/transformer.py +0 -0
  161. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/sam/modules/utils.py +0 -0
  162. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/sam/predict.py +0 -0
  163. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/utils/__init__.py +0 -0
  164. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/utils/loss.py +0 -0
  165. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/utils/ops.py +0 -0
  166. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/__init__.py +0 -0
  167. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/classify/__init__.py +0 -0
  168. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/classify/predict.py +0 -0
  169. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/classify/train.py +0 -0
  170. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/classify/val.py +0 -0
  171. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/detect/__init__.py +0 -0
  172. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/detect/predict.py +0 -0
  173. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/detect/train.py +0 -0
  174. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/detect/val.py +0 -0
  175. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/model.py +0 -0
  176. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/obb/__init__.py +0 -0
  177. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/obb/predict.py +0 -0
  178. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/obb/train.py +0 -0
  179. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/obb/val.py +0 -0
  180. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/pose/__init__.py +0 -0
  181. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/pose/predict.py +0 -0
  182. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/pose/train.py +0 -0
  183. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/pose/val.py +0 -0
  184. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/segment/__init__.py +0 -0
  185. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/segment/predict.py +0 -0
  186. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/segment/train.py +0 -0
  187. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/segment/val.py +0 -0
  188. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/world/__init__.py +0 -0
  189. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/world/train.py +0 -0
  190. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/models/yolo/world/train_world.py +0 -0
  191. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/nn/__init__.py +0 -0
  192. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/nn/autobackend.py +0 -0
  193. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/nn/modules/__init__.py +0 -0
  194. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/nn/modules/activation.py +0 -0
  195. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/nn/modules/block.py +0 -0
  196. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/nn/modules/conv.py +0 -0
  197. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/nn/modules/head.py +0 -0
  198. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/nn/modules/transformer.py +0 -0
  199. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/nn/modules/utils.py +0 -0
  200. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/nn/tasks.py +0 -0
  201. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/solutions/__init__.py +0 -0
  202. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/trackers/__init__.py +0 -0
  203. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/trackers/basetrack.py +0 -0
  204. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/trackers/bot_sort.py +0 -0
  205. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/trackers/byte_tracker.py +0 -0
  206. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/trackers/track.py +0 -0
  207. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/trackers/utils/__init__.py +0 -0
  208. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/trackers/utils/gmc.py +0 -0
  209. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/trackers/utils/kalman_filter.py +0 -0
  210. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/trackers/utils/matching.py +0 -0
  211. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/__init__.py +0 -0
  212. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/autobatch.py +0 -0
  213. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/benchmarks.py +0 -0
  214. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/callbacks/__init__.py +0 -0
  215. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/callbacks/base.py +0 -0
  216. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/callbacks/clearml.py +0 -0
  217. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/callbacks/comet.py +0 -0
  218. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/callbacks/dvc.py +0 -0
  219. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/callbacks/hub.py +0 -0
  220. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/callbacks/mlflow.py +0 -0
  221. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/callbacks/neptune.py +0 -0
  222. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/callbacks/raytune.py +0 -0
  223. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/callbacks/tensorboard.py +0 -0
  224. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/callbacks/wb.py +0 -0
  225. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/checks.py +0 -0
  226. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/dist.py +0 -0
  227. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/downloads.py +0 -0
  228. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/errors.py +0 -0
  229. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/files.py +0 -0
  230. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/instance.py +0 -0
  231. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/loss.py +0 -0
  232. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/metrics.py +0 -0
  233. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/ops.py +0 -0
  234. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/patches.py +0 -0
  235. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/plotting.py +0 -0
  236. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/tal.py +0 -0
  237. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/torch_utils.py +0 -0
  238. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/triton.py +0 -0
  239. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics/utils/tuner.py +0 -0
  240. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics.egg-info/SOURCES.txt +0 -0
  241. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics.egg-info/dependency_links.txt +0 -0
  242. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics.egg-info/entry_points.txt +0 -0
  243. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics.egg-info/requires.txt +0 -0
  244. {ultralytics-8.3.15 → ultralytics-8.3.16}/ultralytics.egg-info/top_level.txt +0 -0
@@ -1,7 +1,7 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ultralytics
3
- Version: 8.3.15
4
- Summary: Ultralytics YOLO for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
3
+ Version: 8.3.16
4
+ Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
5
5
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
6
6
  Maintainer-email: Ultralytics <hello@ultralytics.com>
7
7
  License: AGPL-3.0
@@ -203,7 +203,7 @@ See YOLO [Python Docs](https://docs.ultralytics.com/usage/python/) for more exam
203
203
 
204
204
  YOLO11 [Detect](https://docs.ultralytics.com/tasks/detect/), [Segment](https://docs.ultralytics.com/tasks/segment/) and [Pose](https://docs.ultralytics.com/tasks/pose/) models pretrained on the [COCO](https://docs.ultralytics.com/datasets/detect/coco/) dataset are available here, as well as YOLO11 [Classify](https://docs.ultralytics.com/tasks/classify/) models pretrained on the [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/) dataset. [Track](https://docs.ultralytics.com/modes/track/) mode is available for all Detect, Segment and Pose models.
205
205
 
206
- <img width="1024" src="https://raw.githubusercontent.com/ultralytics/assets/main/im/banner-tasks.png" alt="Ultralytics YOLO supported tasks">
206
+ <img width="100%" src="https://raw.githubusercontent.com/ultralytics/assets/main/im/banner-tasks.png" alt="Ultralytics YOLO supported tasks">
207
207
 
208
208
  All [Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) download automatically from the latest Ultralytics [release](https://github.com/ultralytics/assets/releases) on first use.
209
209
 
@@ -294,7 +294,7 @@ See [OBB Docs](https://docs.ultralytics.com/tasks/obb/) for usage examples with
294
294
 
295
295
  ## <div align="center">Integrations</div>
296
296
 
297
- Our key integrations with leading AI platforms extend the functionality of Ultralytics' offerings, enhancing tasks like dataset labeling, training, visualization, and model management. Discover how Ultralytics, in collaboration with [Roboflow](https://roboflow.com/?ref=ultralytics), ClearML, [Comet](https://bit.ly/yolov8-readme-comet), Neural Magic and [OpenVINO](https://docs.ultralytics.com/integrations/openvino/), can optimize your AI workflow.
297
+ Our key integrations with leading AI platforms extend the functionality of Ultralytics' offerings, enhancing tasks like dataset labeling, training, visualization, and model management. Discover how Ultralytics, in collaboration with [W&B](https://docs.wandb.ai/guides/integrations/ultralytics/), [Comet](https://bit.ly/yolov8-readme-comet), [Roboflow](https://roboflow.com/?ref=ultralytics) and [OpenVINO](https://docs.ultralytics.com/integrations/openvino/), can optimize your AI workflow.
298
298
 
299
299
  <br>
300
300
  <a href="https://www.ultralytics.com/hub" target="_blank">
@@ -303,11 +303,11 @@ Our key integrations with leading AI platforms extend the functionality of Ultra
303
303
  <br>
304
304
 
305
305
  <div align="center">
306
- <a href="https://roboflow.com/?ref=ultralytics">
307
- <img src="https://github.com/ultralytics/assets/raw/main/partners/logo-roboflow.png" width="10%" alt="Roboflow logo"></a>
306
+ <a href="https://www.ultralytics.com/hub">
307
+ <img src="https://github.com/ultralytics/assets/raw/main/partners/logo-ultralytics-hub.png" width="10%" alt="Ultralytics HUB logo"></a>
308
308
  <img src="https://github.com/ultralytics/assets/raw/main/social/logo-transparent.png" width="15%" height="0" alt="space">
309
- <a href="https://clear.ml/">
310
- <img src="https://github.com/ultralytics/assets/raw/main/partners/logo-clearml.png" width="10%" alt="ClearML logo"></a>
309
+ <a href="https://docs.wandb.ai/guides/integrations/ultralytics/">
310
+ <img src="https://github.com/ultralytics/assets/raw/main/partners/logo-wb.png" width="10%" alt="Weights & Biases logo"></a>
311
311
  <img src="https://github.com/ultralytics/assets/raw/main/social/logo-transparent.png" width="15%" height="0" alt="space">
312
312
  <a href="https://bit.ly/yolov8-readme-comet">
313
313
  <img src="https://github.com/ultralytics/assets/raw/main/partners/logo-comet.png" width="10%" alt="Comet ML logo"></a>
@@ -316,9 +316,9 @@ Our key integrations with leading AI platforms extend the functionality of Ultra
316
316
  <img src="https://github.com/ultralytics/assets/raw/main/partners/logo-neuralmagic.png" width="10%" alt="NeuralMagic logo"></a>
317
317
  </div>
318
318
 
319
- | Roboflow | ClearML NEW | Comet ⭐ NEW | Neural Magic ⭐ NEW |
320
- | :--------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------: |
321
- | Label and export your custom datasets directly to YOLO11 for training with [Roboflow](https://roboflow.com/?ref=ultralytics) | Automatically track, visualize and even remotely train YOLO11 using [ClearML](https://clear.ml/) (open-source!) | Free forever, [Comet](https://bit.ly/yolov5-readme-comet) lets you save YOLO11 models, resume training, and interactively visualize and debug predictions | Run YOLO11 inference up to 6x faster with [Neural Magic DeepSparse](https://bit.ly/yolov5-neuralmagic) |
319
+ | Ultralytics HUB 🚀 | W&B | Comet ⭐ NEW | Neural Magic |
320
+ | :----------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------: |
321
+ | Streamline YOLO workflows: Label, train, and deploy effortlessly with [Ultralytics HUB](https://ultralytics.com/hub). Try now! | Track experiments, hyperparameters, and results with [Weights & Biases](https://docs.wandb.ai/guides/integrations/ultralytics/) | Free forever, [Comet](https://bit.ly/yolov5-readme-comet) lets you save YOLO11 models, resume training, and interactively visualize and debug predictions | Run YOLO11 inference up to 6x faster with [Neural Magic DeepSparse](https://bit.ly/yolov5-neuralmagic) |
322
322
 
323
323
  ## <div align="center">Ultralytics HUB</div>
324
324
 
@@ -116,7 +116,7 @@ See YOLO [Python Docs](https://docs.ultralytics.com/usage/python/) for more exam
116
116
 
117
117
  YOLO11 [Detect](https://docs.ultralytics.com/tasks/detect/), [Segment](https://docs.ultralytics.com/tasks/segment/) and [Pose](https://docs.ultralytics.com/tasks/pose/) models pretrained on the [COCO](https://docs.ultralytics.com/datasets/detect/coco/) dataset are available here, as well as YOLO11 [Classify](https://docs.ultralytics.com/tasks/classify/) models pretrained on the [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/) dataset. [Track](https://docs.ultralytics.com/modes/track/) mode is available for all Detect, Segment and Pose models.
118
118
 
119
- <img width="1024" src="https://raw.githubusercontent.com/ultralytics/assets/main/im/banner-tasks.png" alt="Ultralytics YOLO supported tasks">
119
+ <img width="100%" src="https://raw.githubusercontent.com/ultralytics/assets/main/im/banner-tasks.png" alt="Ultralytics YOLO supported tasks">
120
120
 
121
121
  All [Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cfg/models) download automatically from the latest Ultralytics [release](https://github.com/ultralytics/assets/releases) on first use.
122
122
 
@@ -207,7 +207,7 @@ See [OBB Docs](https://docs.ultralytics.com/tasks/obb/) for usage examples with
207
207
 
208
208
  ## <div align="center">Integrations</div>
209
209
 
210
- Our key integrations with leading AI platforms extend the functionality of Ultralytics' offerings, enhancing tasks like dataset labeling, training, visualization, and model management. Discover how Ultralytics, in collaboration with [Roboflow](https://roboflow.com/?ref=ultralytics), ClearML, [Comet](https://bit.ly/yolov8-readme-comet), Neural Magic and [OpenVINO](https://docs.ultralytics.com/integrations/openvino/), can optimize your AI workflow.
210
+ Our key integrations with leading AI platforms extend the functionality of Ultralytics' offerings, enhancing tasks like dataset labeling, training, visualization, and model management. Discover how Ultralytics, in collaboration with [W&B](https://docs.wandb.ai/guides/integrations/ultralytics/), [Comet](https://bit.ly/yolov8-readme-comet), [Roboflow](https://roboflow.com/?ref=ultralytics) and [OpenVINO](https://docs.ultralytics.com/integrations/openvino/), can optimize your AI workflow.
211
211
 
212
212
  <br>
213
213
  <a href="https://www.ultralytics.com/hub" target="_blank">
@@ -216,11 +216,11 @@ Our key integrations with leading AI platforms extend the functionality of Ultra
216
216
  <br>
217
217
 
218
218
  <div align="center">
219
- <a href="https://roboflow.com/?ref=ultralytics">
220
- <img src="https://github.com/ultralytics/assets/raw/main/partners/logo-roboflow.png" width="10%" alt="Roboflow logo"></a>
219
+ <a href="https://www.ultralytics.com/hub">
220
+ <img src="https://github.com/ultralytics/assets/raw/main/partners/logo-ultralytics-hub.png" width="10%" alt="Ultralytics HUB logo"></a>
221
221
  <img src="https://github.com/ultralytics/assets/raw/main/social/logo-transparent.png" width="15%" height="0" alt="space">
222
- <a href="https://clear.ml/">
223
- <img src="https://github.com/ultralytics/assets/raw/main/partners/logo-clearml.png" width="10%" alt="ClearML logo"></a>
222
+ <a href="https://docs.wandb.ai/guides/integrations/ultralytics/">
223
+ <img src="https://github.com/ultralytics/assets/raw/main/partners/logo-wb.png" width="10%" alt="Weights & Biases logo"></a>
224
224
  <img src="https://github.com/ultralytics/assets/raw/main/social/logo-transparent.png" width="15%" height="0" alt="space">
225
225
  <a href="https://bit.ly/yolov8-readme-comet">
226
226
  <img src="https://github.com/ultralytics/assets/raw/main/partners/logo-comet.png" width="10%" alt="Comet ML logo"></a>
@@ -229,9 +229,9 @@ Our key integrations with leading AI platforms extend the functionality of Ultra
229
229
  <img src="https://github.com/ultralytics/assets/raw/main/partners/logo-neuralmagic.png" width="10%" alt="NeuralMagic logo"></a>
230
230
  </div>
231
231
 
232
- | Roboflow | ClearML NEW | Comet ⭐ NEW | Neural Magic ⭐ NEW |
233
- | :--------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------: |
234
- | Label and export your custom datasets directly to YOLO11 for training with [Roboflow](https://roboflow.com/?ref=ultralytics) | Automatically track, visualize and even remotely train YOLO11 using [ClearML](https://clear.ml/) (open-source!) | Free forever, [Comet](https://bit.ly/yolov5-readme-comet) lets you save YOLO11 models, resume training, and interactively visualize and debug predictions | Run YOLO11 inference up to 6x faster with [Neural Magic DeepSparse](https://bit.ly/yolov5-neuralmagic) |
232
+ | Ultralytics HUB 🚀 | W&B | Comet ⭐ NEW | Neural Magic |
233
+ | :----------------------------------------------------------------------------------------------------------------------------: | :-----------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------: |
234
+ | Streamline YOLO workflows: Label, train, and deploy effortlessly with [Ultralytics HUB](https://ultralytics.com/hub). Try now! | Track experiments, hyperparameters, and results with [Weights & Biases](https://docs.wandb.ai/guides/integrations/ultralytics/) | Free forever, [Comet](https://bit.ly/yolov5-readme-comet) lets you save YOLO11 models, resume training, and interactively visualize and debug predictions | Run YOLO11 inference up to 6x faster with [Neural Magic DeepSparse](https://bit.ly/yolov5-neuralmagic) |
235
235
 
236
236
  ## <div align="center">Ultralytics HUB</div>
237
237
 
@@ -26,7 +26,7 @@ build-backend = "setuptools.build_meta"
26
26
  [project]
27
27
  name = "ultralytics"
28
28
  dynamic = ["version"]
29
- description = "Ultralytics YOLO for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification."
29
+ description = "Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification."
30
30
  readme = "README.md"
31
31
  requires-python = ">=3.8"
32
32
  license = { "text" = "AGPL-3.0" }
@@ -17,10 +17,15 @@ def test_major_solutions():
17
17
  cap = cv2.VideoCapture("solutions_ci_demo.mp4")
18
18
  assert cap.isOpened(), "Error reading video file"
19
19
  region_points = [(20, 400), (1080, 404), (1080, 360), (20, 360)]
20
- counter = solutions.ObjectCounter(region=region_points, model="yolo11n.pt", show=False)
21
- heatmap = solutions.Heatmap(colormap=cv2.COLORMAP_PARULA, model="yolo11n.pt", show=False)
22
- speed = solutions.SpeedEstimator(region=region_points, model="yolo11n.pt", show=False)
23
- queue = solutions.QueueManager(region=region_points, model="yolo11n.pt", show=False)
20
+ counter = solutions.ObjectCounter(region=region_points, model="yolo11n.pt", show=False) # Test object counter
21
+ heatmap = solutions.Heatmap(colormap=cv2.COLORMAP_PARULA, model="yolo11n.pt", show=False) # Test heatmaps
22
+ speed = solutions.SpeedEstimator(region=region_points, model="yolo11n.pt", show=False) # Test speed estimation
23
+ queue = solutions.QueueManager(region=region_points, model="yolo11n.pt", show=False) # Test queue manager
24
+ line_analytics = solutions.Analytics(analytics_type="line", model="yolo11n.pt", show=False) # line analytics
25
+ pie_analytics = solutions.Analytics(analytics_type="pie", model="yolo11n.pt", show=False) # pie analytics
26
+ bar_analytics = solutions.Analytics(analytics_type="bar", model="yolo11n.pt", show=False) # bar analytics
27
+ area_analytics = solutions.Analytics(analytics_type="area", model="yolo11n.pt", show=False) # area analytics
28
+ frame_count = 0 # Required for analytics
24
29
  while cap.isOpened():
25
30
  success, im0 = cap.read()
26
31
  if not success:
@@ -30,24 +35,23 @@ def test_major_solutions():
30
35
  _ = heatmap.generate_heatmap(original_im0.copy())
31
36
  _ = speed.estimate_speed(original_im0.copy())
32
37
  _ = queue.process_queue(original_im0.copy())
38
+ _ = line_analytics.process_data(original_im0.copy(), frame_count)
39
+ _ = pie_analytics.process_data(original_im0.copy(), frame_count)
40
+ _ = bar_analytics.process_data(original_im0.copy(), frame_count)
41
+ _ = area_analytics.process_data(original_im0.copy(), frame_count)
33
42
  cap.release()
34
- cv2.destroyAllWindows()
35
-
36
43
 
37
- @pytest.mark.slow
38
- def test_aigym():
39
- """Test the workouts monitoring solution."""
44
+ # Test workouts monitoring
40
45
  safe_download(url=WORKOUTS_SOLUTION_DEMO)
41
- cap = cv2.VideoCapture("solution_ci_pose_demo.mp4")
42
- assert cap.isOpened(), "Error reading video file"
43
- gym = solutions.AIGym(line_width=2, kpts=[5, 11, 13])
44
- while cap.isOpened():
45
- success, im0 = cap.read()
46
+ cap1 = cv2.VideoCapture("solution_ci_pose_demo.mp4")
47
+ assert cap1.isOpened(), "Error reading video file"
48
+ gym = solutions.AIGym(line_width=2, kpts=[5, 11, 13], show=False)
49
+ while cap1.isOpened():
50
+ success, im0 = cap1.read()
46
51
  if not success:
47
52
  break
48
53
  _ = gym.monitor(im0)
49
- cap.release()
50
- cv2.destroyAllWindows()
54
+ cap1.release()
51
55
 
52
56
 
53
57
  @pytest.mark.slow
@@ -1,6 +1,6 @@
1
1
  # Ultralytics YOLO 🚀, AGPL-3.0 license
2
2
 
3
- __version__ = "8.3.15"
3
+ __version__ = "8.3.16"
4
4
 
5
5
  import os
6
6
 
@@ -15,3 +15,4 @@ down_angle: 90 # Workouts down_angle for counts, 90 is default value. You can ch
15
15
  kpts: [6, 8, 10] # Keypoints for workouts monitoring, i.e. If you want to consider keypoints for pushups that have mostly values of [6, 8, 10].
16
16
  colormap: # Colormap for heatmap, Only OPENCV supported colormaps can be used. By default COLORMAP_PARULA will be used for visualization.
17
17
  analytics_type: "line" # Analytics type i.e "line", "pie", "bar" or "area" charts. By default, "line" analytics will be used for processing.
18
+ json_file: # Parking system regions file path.
@@ -13,9 +13,6 @@ from tqdm import tqdm
13
13
  from ultralytics.data.utils import exif_size, img2label_paths
14
14
  from ultralytics.utils.checks import check_requirements
15
15
 
16
- check_requirements("shapely")
17
- from shapely.geometry import Polygon
18
-
19
16
 
20
17
  def bbox_iof(polygon1, bbox2, eps=1e-6):
21
18
  """
@@ -33,6 +30,9 @@ def bbox_iof(polygon1, bbox2, eps=1e-6):
33
30
  Polygon format: [x1, y1, x2, y2, x3, y3, x4, y4].
34
31
  Bounding box format: [x_min, y_min, x_max, y_max].
35
32
  """
33
+ check_requirements("shapely")
34
+ from shapely.geometry import Polygon
35
+
36
36
  polygon1 = polygon1.reshape(-1, 4, 2)
37
37
  lt_point = np.min(polygon1, axis=-2) # left-top
38
38
  rb_point = np.max(polygon1, axis=-2) # right-bottom
@@ -1,16 +1,40 @@
1
1
  # Ultralytics YOLO 🚀, AGPL-3.0 license
2
2
 
3
- from ultralytics.solutions.solutions import BaseSolution # Import a parent class
3
+ from ultralytics.solutions.solutions import BaseSolution
4
4
  from ultralytics.utils.plotting import Annotator
5
5
 
6
6
 
7
7
  class AIGym(BaseSolution):
8
- """A class to manage the gym steps of people in a real-time video stream based on their poses."""
8
+ """
9
+ A class to manage gym steps of people in a real-time video stream based on their poses.
10
+
11
+ This class extends BaseSolution to monitor workouts using YOLO pose estimation models. It tracks and counts
12
+ repetitions of exercises based on predefined angle thresholds for up and down positions.
13
+
14
+ Attributes:
15
+ count (List[int]): Repetition counts for each detected person.
16
+ angle (List[float]): Current angle of the tracked body part for each person.
17
+ stage (List[str]): Current exercise stage ('up', 'down', or '-') for each person.
18
+ initial_stage (str | None): Initial stage of the exercise.
19
+ up_angle (float): Angle threshold for considering the 'up' position of an exercise.
20
+ down_angle (float): Angle threshold for considering the 'down' position of an exercise.
21
+ kpts (List[int]): Indices of keypoints used for angle calculation.
22
+ lw (int): Line width for drawing annotations.
23
+ annotator (Annotator): Object for drawing annotations on the image.
24
+
25
+ Methods:
26
+ monitor: Processes a frame to detect poses, calculate angles, and count repetitions.
27
+
28
+ Examples:
29
+ >>> gym = AIGym(model="yolov8n-pose.pt")
30
+ >>> image = cv2.imread("gym_scene.jpg")
31
+ >>> processed_image = gym.monitor(image)
32
+ >>> cv2.imshow("Processed Image", processed_image)
33
+ >>> cv2.waitKey(0)
34
+ """
9
35
 
10
36
  def __init__(self, **kwargs):
11
- """Initialization function for AiGYM class, a child class of BaseSolution class, can be used for workouts
12
- monitoring.
13
- """
37
+ """Initializes AIGym for workout monitoring using pose estimation and predefined angles."""
14
38
  # Check if the model name ends with '-pose'
15
39
  if "model" in kwargs and "-pose" not in kwargs["model"]:
16
40
  kwargs["model"] = "yolo11n-pose.pt"
@@ -31,12 +55,22 @@ class AIGym(BaseSolution):
31
55
 
32
56
  def monitor(self, im0):
33
57
  """
34
- Monitor the workouts using Ultralytics YOLO Pose Model: https://docs.ultralytics.com/tasks/pose/.
58
+ Monitors workouts using Ultralytics YOLO Pose Model.
59
+
60
+ This function processes an input image to track and analyze human poses for workout monitoring. It uses
61
+ the YOLO Pose model to detect keypoints, estimate angles, and count repetitions based on predefined
62
+ angle thresholds.
35
63
 
36
64
  Args:
37
- im0 (ndarray): The input image that will be used for processing
38
- Returns
39
- im0 (ndarray): The processed image for more usage
65
+ im0 (ndarray): Input image for processing.
66
+
67
+ Returns:
68
+ (ndarray): Processed image with annotations for workout monitoring.
69
+
70
+ Examples:
71
+ >>> gym = AIGym()
72
+ >>> image = cv2.imread("workout.jpg")
73
+ >>> processed_image = gym.monitor(image)
40
74
  """
41
75
  # Extract tracks
42
76
  tracks = self.model.track(source=im0, persist=True, classes=self.CFG["classes"])[0]
@@ -12,10 +12,41 @@ from ultralytics.solutions.solutions import BaseSolution # Import a parent clas
12
12
 
13
13
 
14
14
  class Analytics(BaseSolution):
15
- """A class to create and update various types of charts (line, bar, pie, area) for visual analytics."""
15
+ """
16
+ A class for creating and updating various types of charts for visual analytics.
17
+
18
+ This class extends BaseSolution to provide functionality for generating line, bar, pie, and area charts
19
+ based on object detection and tracking data.
20
+
21
+ Attributes:
22
+ type (str): The type of analytics chart to generate ('line', 'bar', 'pie', or 'area').
23
+ x_label (str): Label for the x-axis.
24
+ y_label (str): Label for the y-axis.
25
+ bg_color (str): Background color of the chart frame.
26
+ fg_color (str): Foreground color of the chart frame.
27
+ title (str): Title of the chart window.
28
+ max_points (int): Maximum number of data points to display on the chart.
29
+ fontsize (int): Font size for text display.
30
+ color_cycle (cycle): Cyclic iterator for chart colors.
31
+ total_counts (int): Total count of detected objects (used for line charts).
32
+ clswise_count (Dict[str, int]): Dictionary for class-wise object counts.
33
+ fig (Figure): Matplotlib figure object for the chart.
34
+ ax (Axes): Matplotlib axes object for the chart.
35
+ canvas (FigureCanvas): Canvas for rendering the chart.
36
+
37
+ Methods:
38
+ process_data: Processes image data and updates the chart.
39
+ update_graph: Updates the chart with new data points.
40
+
41
+ Examples:
42
+ >>> analytics = Analytics(analytics_type="line")
43
+ >>> frame = cv2.imread("image.jpg")
44
+ >>> processed_frame = analytics.process_data(frame, frame_number=1)
45
+ >>> cv2.imshow("Analytics", processed_frame)
46
+ """
16
47
 
17
48
  def __init__(self, **kwargs):
18
- """Initialize the Analytics class with various chart types."""
49
+ """Initialize Analytics class with various chart types for visual data representation."""
19
50
  super().__init__(**kwargs)
20
51
 
21
52
  self.type = self.CFG["analytics_type"] # extract type of analytics
@@ -31,8 +62,8 @@ class Analytics(BaseSolution):
31
62
  figsize = (19.2, 10.8) # Set output image size 1920 * 1080
32
63
  self.color_cycle = cycle(["#DD00BA", "#042AFF", "#FF4447", "#7D24FF", "#BD00FF"])
33
64
 
34
- self.total_counts = 0 # count variable for storing total counts i.e for line
35
- self.clswise_count = {} # dictionary for classwise counts
65
+ self.total_counts = 0 # count variable for storing total counts i.e. for line
66
+ self.clswise_count = {} # dictionary for class-wise counts
36
67
 
37
68
  # Ensure line and area chart
38
69
  if self.type in {"line", "area"}:
@@ -48,15 +79,28 @@ class Analytics(BaseSolution):
48
79
  self.canvas = FigureCanvas(self.fig) # Set common axis properties
49
80
  self.ax.set_facecolor(self.bg_color)
50
81
  self.color_mapping = {}
51
- self.ax.axis("equal") if self.type == "pie" else None # Ensure pie chart is circular
82
+
83
+ if self.type == "pie": # Ensure pie chart is circular
84
+ self.ax.axis("equal")
52
85
 
53
86
  def process_data(self, im0, frame_number):
54
87
  """
55
- Process the image data, run object tracking.
88
+ Processes image data and runs object tracking to update analytics charts.
56
89
 
57
90
  Args:
58
- im0 (ndarray): Input image for processing.
59
- frame_number (int): Video frame # for plotting the data.
91
+ im0 (np.ndarray): Input image for processing.
92
+ frame_number (int): Video frame number for plotting the data.
93
+
94
+ Returns:
95
+ (np.ndarray): Processed image with updated analytics chart.
96
+
97
+ Raises:
98
+ ModuleNotFoundError: If an unsupported chart type is specified.
99
+
100
+ Examples:
101
+ >>> analytics = Analytics(analytics_type="line")
102
+ >>> frame = np.zeros((480, 640, 3), dtype=np.uint8)
103
+ >>> processed_frame = analytics.process_data(frame, frame_number=1)
60
104
  """
61
105
  self.extract_tracks(im0) # Extract tracks
62
106
 
@@ -79,13 +123,22 @@ class Analytics(BaseSolution):
79
123
 
80
124
  def update_graph(self, frame_number, count_dict=None, plot="line"):
81
125
  """
82
- Update the graph (line or area) with new data for single or multiple classes.
126
+ Updates the graph with new data for single or multiple classes.
83
127
 
84
128
  Args:
85
129
  frame_number (int): The current frame number.
86
- count_dict (dict, optional): Dictionary with class names as keys and counts as values for multiple classes.
87
- If None, updates a single line graph.
88
- plot (str): Type of the plot i.e. line, bar or area.
130
+ count_dict (Dict[str, int] | None): Dictionary with class names as keys and counts as values for multiple
131
+ classes. If None, updates a single line graph.
132
+ plot (str): Type of the plot. Options are 'line', 'bar', 'pie', or 'area'.
133
+
134
+ Returns:
135
+ (np.ndarray): Updated image containing the graph.
136
+
137
+ Examples:
138
+ >>> analytics = Analytics()
139
+ >>> frame_number = 10
140
+ >>> count_dict = {"person": 5, "car": 3}
141
+ >>> updated_image = analytics.update_graph(frame_number, count_dict, plot="bar")
89
142
  """
90
143
  if count_dict is None:
91
144
  # Single line update
@@ -4,15 +4,41 @@ import math
4
4
 
5
5
  import cv2
6
6
 
7
- from ultralytics.solutions.solutions import BaseSolution # Import a parent class
7
+ from ultralytics.solutions.solutions import BaseSolution
8
8
  from ultralytics.utils.plotting import Annotator, colors
9
9
 
10
10
 
11
11
  class DistanceCalculation(BaseSolution):
12
- """A class to calculate distance between two objects in a real-time video stream based on their tracks."""
12
+ """
13
+ A class to calculate distance between two objects in a real-time video stream based on their tracks.
14
+
15
+ This class extends BaseSolution to provide functionality for selecting objects and calculating the distance
16
+ between them in a video stream using YOLO object detection and tracking.
17
+
18
+ Attributes:
19
+ left_mouse_count (int): Counter for left mouse button clicks.
20
+ selected_boxes (Dict[int, List[float]]): Dictionary to store selected bounding boxes and their track IDs.
21
+ annotator (Annotator): An instance of the Annotator class for drawing on the image.
22
+ boxes (List[List[float]]): List of bounding boxes for detected objects.
23
+ track_ids (List[int]): List of track IDs for detected objects.
24
+ clss (List[int]): List of class indices for detected objects.
25
+ names (List[str]): List of class names that the model can detect.
26
+ centroids (List[List[int]]): List to store centroids of selected bounding boxes.
27
+
28
+ Methods:
29
+ mouse_event_for_distance: Handles mouse events for selecting objects in the video stream.
30
+ calculate: Processes video frames and calculates the distance between selected objects.
31
+
32
+ Examples:
33
+ >>> distance_calc = DistanceCalculation()
34
+ >>> frame = cv2.imread("frame.jpg")
35
+ >>> processed_frame = distance_calc.calculate(frame)
36
+ >>> cv2.imshow("Distance Calculation", processed_frame)
37
+ >>> cv2.waitKey(0)
38
+ """
13
39
 
14
40
  def __init__(self, **kwargs):
15
- """Initializes the DistanceCalculation class with the given parameters."""
41
+ """Initializes the DistanceCalculation class for measuring object distances in video streams."""
16
42
  super().__init__(**kwargs)
17
43
 
18
44
  # Mouse event information
@@ -21,14 +47,18 @@ class DistanceCalculation(BaseSolution):
21
47
 
22
48
  def mouse_event_for_distance(self, event, x, y, flags, param):
23
49
  """
24
- Handles mouse events to select regions in a real-time video stream.
50
+ Handles mouse events to select regions in a real-time video stream for distance calculation.
25
51
 
26
52
  Args:
27
- event (int): Type of mouse event (e.g., cv2.EVENT_MOUSEMOVE, cv2.EVENT_LBUTTONDOWN, etc.).
53
+ event (int): Type of mouse event (e.g., cv2.EVENT_MOUSEMOVE, cv2.EVENT_LBUTTONDOWN).
28
54
  x (int): X-coordinate of the mouse pointer.
29
55
  y (int): Y-coordinate of the mouse pointer.
30
- flags (int): Flags associated with the event (e.g., cv2.EVENT_FLAG_CTRLKEY, cv2.EVENT_FLAG_SHIFTKEY, etc.).
31
- param (dict): Additional parameters passed to the function.
56
+ flags (int): Flags associated with the event (e.g., cv2.EVENT_FLAG_CTRLKEY, cv2.EVENT_FLAG_SHIFTKEY).
57
+ param (Dict): Additional parameters passed to the function.
58
+
59
+ Examples:
60
+ >>> # Assuming 'dc' is an instance of DistanceCalculation
61
+ >>> cv2.setMouseCallback("window_name", dc.mouse_event_for_distance)
32
62
  """
33
63
  if event == cv2.EVENT_LBUTTONDOWN:
34
64
  self.left_mouse_count += 1
@@ -43,13 +73,23 @@ class DistanceCalculation(BaseSolution):
43
73
 
44
74
  def calculate(self, im0):
45
75
  """
46
- Processes the video frame and calculates the distance between two bounding boxes.
76
+ Processes a video frame and calculates the distance between two selected bounding boxes.
77
+
78
+ This method extracts tracks from the input frame, annotates bounding boxes, and calculates the distance
79
+ between two user-selected objects if they have been chosen.
47
80
 
48
81
  Args:
49
- im0 (ndarray): The image frame.
82
+ im0 (numpy.ndarray): The input image frame to process.
50
83
 
51
84
  Returns:
52
- (ndarray): The processed image frame.
85
+ (numpy.ndarray): The processed image frame with annotations and distance calculations.
86
+
87
+ Examples:
88
+ >>> import numpy as np
89
+ >>> from ultralytics.solutions import DistanceCalculation
90
+ >>> dc = DistanceCalculation()
91
+ >>> frame = np.random.randint(0, 255, (480, 640, 3), dtype=np.uint8)
92
+ >>> processed_frame = dc.calculate(frame)
53
93
  """
54
94
  self.annotator = Annotator(im0, line_width=self.line_width) # Initialize annotator
55
95
  self.extract_tracks(im0) # Extract tracks
@@ -3,15 +3,40 @@
3
3
  import cv2
4
4
  import numpy as np
5
5
 
6
- from ultralytics.solutions.object_counter import ObjectCounter # Import object counter class
6
+ from ultralytics.solutions.object_counter import ObjectCounter
7
7
  from ultralytics.utils.plotting import Annotator
8
8
 
9
9
 
10
10
  class Heatmap(ObjectCounter):
11
- """A class to draw heatmaps in real-time video stream based on their tracks."""
11
+ """
12
+ A class to draw heatmaps in real-time video streams based on object tracks.
13
+
14
+ This class extends the ObjectCounter class to generate and visualize heatmaps of object movements in video
15
+ streams. It uses tracked object positions to create a cumulative heatmap effect over time.
16
+
17
+ Attributes:
18
+ initialized (bool): Flag indicating whether the heatmap has been initialized.
19
+ colormap (int): OpenCV colormap used for heatmap visualization.
20
+ heatmap (np.ndarray): Array storing the cumulative heatmap data.
21
+ annotator (Annotator): Object for drawing annotations on the image.
22
+
23
+ Methods:
24
+ heatmap_effect: Calculates and updates the heatmap effect for a given bounding box.
25
+ generate_heatmap: Generates and applies the heatmap effect to each frame.
26
+
27
+ Examples:
28
+ >>> from ultralytics.solutions import Heatmap
29
+ >>> heatmap = Heatmap(model="yolov8n.pt", colormap=cv2.COLORMAP_JET)
30
+ >>> frame = cv2.imread("frame.jpg")
32
+ >>> processed_frame = heatmap.generate_heatmap(frame)
33
+ >>> cv2.imshow("Heatmap", processed_frame)
34
+ >>> if cv2.waitKey(0) & 0xFF == ord("q"):
35
+ ... cv2.destroyAllWindows()
36
+ """
12
37
 
13
38
  def __init__(self, **kwargs):
14
- """Initializes function for heatmap class with default values."""
39
+ """Initializes the Heatmap class for real-time video stream heatmap generation based on object tracks."""
15
40
  super().__init__(**kwargs)
16
41
 
17
42
  self.initialized = False # bool variable for heatmap initialization
@@ -23,10 +48,15 @@ class Heatmap(ObjectCounter):
23
48
 
24
49
  def heatmap_effect(self, box):
25
50
  """
26
- Efficient calculation of heatmap area and effect location for applying colormap.
51
+ Efficiently calculates heatmap area and effect location for applying colormap.
27
52
 
28
53
  Args:
29
- box (list): Bounding Box coordinates data [x0, y0, x1, y1]
54
+ box (List[float]): Bounding box coordinates [x0, y0, x1, y1].
55
+
56
+ Examples:
57
+ >>> heatmap = Heatmap()
58
+ >>> box = [100, 100, 200, 200]
59
+ >>> heatmap.heatmap_effect(box)
30
60
  """
31
61
  x0, y0, x1, y1 = map(int, box)
32
62
  radius_squared = (min(x1 - x0, y1 - y0) // 2) ** 2
@@ -48,9 +78,15 @@ class Heatmap(ObjectCounter):
48
78
  Generate heatmap for each frame using Ultralytics.
49
79
 
50
80
  Args:
51
- im0 (ndarray): Input image array for processing
81
+ im0 (np.ndarray): Input image array for processing.
82
+
52
83
  Returns:
53
- im0 (ndarray): Processed image for further usage
84
+ (np.ndarray): Processed image with heatmap overlay and object counts (if region is specified).
85
+
86
+ Examples:
87
+ >>> heatmap = Heatmap()
88
+ >>> im0 = cv2.imread("image.jpg")
89
+ >>> result = heatmap.generate_heatmap(im0)
54
90
  """
55
91
  if not self.initialized:
56
92
  self.heatmap = np.zeros_like(im0, dtype=np.float32) * 0.99
@@ -70,16 +106,17 @@ class Heatmap(ObjectCounter):
70
106
  self.store_classwise_counts(cls) # store classwise counts in dict
71
107
 
72
108
  # Store tracking previous position and perform object counting
73
- prev_position = self.track_history[track_id][-2] if len(self.track_history[track_id]) > 1 else None
109
+ prev_position = None
110
+ if len(self.track_history[track_id]) > 1:
111
+ prev_position = self.track_history[track_id][-2]
74
112
  self.count_objects(self.track_line, box, track_id, prev_position, cls) # Perform object counting
75
113
 
76
- self.display_counts(im0) if self.region is not None else None # Display the counts on the frame
114
+ if self.region is not None:
115
+ self.display_counts(im0) # Display the counts on the frame
77
116
 
78
117
  # Normalize, apply colormap to heatmap and combine with original image
79
- im0 = (
80
- im0
81
- if self.track_data.id is None
82
- else cv2.addWeighted(
118
+ if self.track_data.id is not None:
119
+ im0 = cv2.addWeighted(
83
120
  im0,
84
121
  0.5,
85
122
  cv2.applyColorMap(
@@ -88,7 +125,6 @@ class Heatmap(ObjectCounter):
88
125
  0.5,
89
126
  0,
90
127
  )
91
- )
92
128
 
93
129
  self.display_output(im0) # display output with base class function
94
130
  return im0 # return output image for more usage