ultralytics 8.3.132__tar.gz → 8.3.134__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (277)
  1. {ultralytics-8.3.132/ultralytics.egg-info → ultralytics-8.3.134}/PKG-INFO +2 -2
  2. {ultralytics-8.3.132 → ultralytics-8.3.134}/pyproject.toml +1 -1
  3. {ultralytics-8.3.132 → ultralytics-8.3.134}/tests/test_cli.py +10 -0
  4. {ultralytics-8.3.132 → ultralytics-8.3.134}/tests/test_cuda.py +16 -10
  5. {ultralytics-8.3.132 → ultralytics-8.3.134}/tests/test_python.py +4 -2
  6. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/__init__.py +1 -1
  7. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/augment.py +4 -0
  8. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/build.py +2 -2
  9. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/dataset.py +4 -2
  10. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/loaders.py +18 -2
  11. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/utils.py +2 -2
  12. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/engine/exporter.py +14 -6
  13. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/engine/model.py +1 -1
  14. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/engine/results.py +1 -1
  15. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/detect/predict.py +1 -1
  16. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/nn/tasks.py +6 -4
  17. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/ai_gym.py +23 -30
  18. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/heatmap.py +1 -2
  19. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/object_counter.py +4 -20
  20. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/security_alarm.py +1 -1
  21. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/trackers/byte_tracker.py +5 -1
  22. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/trackers/track.py +4 -5
  23. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/trackers/utils/gmc.py +6 -6
  24. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/__init__.py +2 -1
  25. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/checks.py +16 -11
  26. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/loss.py +1 -1
  27. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/metrics.py +1 -1
  28. {ultralytics-8.3.132 → ultralytics-8.3.134/ultralytics.egg-info}/PKG-INFO +2 -2
  29. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics.egg-info/requires.txt +1 -1
  30. {ultralytics-8.3.132 → ultralytics-8.3.134}/LICENSE +0 -0
  31. {ultralytics-8.3.132 → ultralytics-8.3.134}/README.md +0 -0
  32. {ultralytics-8.3.132 → ultralytics-8.3.134}/setup.cfg +0 -0
  33. {ultralytics-8.3.132 → ultralytics-8.3.134}/tests/__init__.py +0 -0
  34. {ultralytics-8.3.132 → ultralytics-8.3.134}/tests/conftest.py +0 -0
  35. {ultralytics-8.3.132 → ultralytics-8.3.134}/tests/test_engine.py +0 -0
  36. {ultralytics-8.3.132 → ultralytics-8.3.134}/tests/test_exports.py +0 -0
  37. {ultralytics-8.3.132 → ultralytics-8.3.134}/tests/test_integrations.py +0 -0
  38. {ultralytics-8.3.132 → ultralytics-8.3.134}/tests/test_solutions.py +0 -0
  39. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/assets/bus.jpg +0 -0
  40. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/assets/zidane.jpg +0 -0
  41. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/__init__.py +0 -0
  42. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/Argoverse.yaml +0 -0
  43. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/DOTAv1.5.yaml +0 -0
  44. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/DOTAv1.yaml +0 -0
  45. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/GlobalWheat2020.yaml +0 -0
  46. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/HomeObjects-3K.yaml +0 -0
  47. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/ImageNet.yaml +0 -0
  48. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/Objects365.yaml +0 -0
  49. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/SKU-110K.yaml +0 -0
  50. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/VOC.yaml +0 -0
  51. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/VisDrone.yaml +0 -0
  52. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/african-wildlife.yaml +0 -0
  53. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/brain-tumor.yaml +0 -0
  54. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/carparts-seg.yaml +0 -0
  55. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/coco-pose.yaml +0 -0
  56. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/coco.yaml +0 -0
  57. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/coco128-seg.yaml +0 -0
  58. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/coco128.yaml +0 -0
  59. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/coco8-multispectral.yaml +0 -0
  60. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/coco8-pose.yaml +0 -0
  61. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/coco8-seg.yaml +0 -0
  62. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/coco8.yaml +0 -0
  63. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/crack-seg.yaml +0 -0
  64. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/dog-pose.yaml +0 -0
  65. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/dota8-multispectral.yaml +0 -0
  66. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/dota8.yaml +0 -0
  67. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/hand-keypoints.yaml +0 -0
  68. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/lvis.yaml +0 -0
  69. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/medical-pills.yaml +0 -0
  70. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/open-images-v7.yaml +0 -0
  71. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/package-seg.yaml +0 -0
  72. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/signature.yaml +0 -0
  73. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/tiger-pose.yaml +0 -0
  74. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/datasets/xView.yaml +0 -0
  75. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/default.yaml +0 -0
  76. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/11/yolo11-cls-resnet18.yaml +0 -0
  77. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/11/yolo11-cls.yaml +0 -0
  78. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/11/yolo11-obb.yaml +0 -0
  79. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/11/yolo11-pose.yaml +0 -0
  80. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/11/yolo11-seg.yaml +0 -0
  81. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/11/yolo11.yaml +0 -0
  82. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/11/yoloe-11-seg.yaml +0 -0
  83. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/11/yoloe-11.yaml +0 -0
  84. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/12/yolo12-cls.yaml +0 -0
  85. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/12/yolo12-obb.yaml +0 -0
  86. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/12/yolo12-pose.yaml +0 -0
  87. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/12/yolo12-seg.yaml +0 -0
  88. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/12/yolo12.yaml +0 -0
  89. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/rt-detr/rtdetr-l.yaml +0 -0
  90. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/rt-detr/rtdetr-resnet101.yaml +0 -0
  91. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/rt-detr/rtdetr-resnet50.yaml +0 -0
  92. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/rt-detr/rtdetr-x.yaml +0 -0
  93. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v10/yolov10b.yaml +0 -0
  94. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v10/yolov10l.yaml +0 -0
  95. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v10/yolov10m.yaml +0 -0
  96. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v10/yolov10n.yaml +0 -0
  97. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v10/yolov10s.yaml +0 -0
  98. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v10/yolov10x.yaml +0 -0
  99. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v3/yolov3-spp.yaml +0 -0
  100. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v3/yolov3-tiny.yaml +0 -0
  101. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v3/yolov3.yaml +0 -0
  102. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v5/yolov5-p6.yaml +0 -0
  103. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v5/yolov5.yaml +0 -0
  104. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v6/yolov6.yaml +0 -0
  105. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yoloe-v8-seg.yaml +0 -0
  106. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yoloe-v8.yaml +0 -0
  107. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8-cls-resnet101.yaml +0 -0
  108. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8-cls-resnet50.yaml +0 -0
  109. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8-cls.yaml +0 -0
  110. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8-ghost-p2.yaml +0 -0
  111. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8-ghost-p6.yaml +0 -0
  112. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8-ghost.yaml +0 -0
  113. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8-obb.yaml +0 -0
  114. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8-p2.yaml +0 -0
  115. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8-p6.yaml +0 -0
  116. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8-pose-p6.yaml +0 -0
  117. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8-pose.yaml +0 -0
  118. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8-rtdetr.yaml +0 -0
  119. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8-seg-p6.yaml +0 -0
  120. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8-seg.yaml +0 -0
  121. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8-world.yaml +0 -0
  122. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8-worldv2.yaml +0 -0
  123. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v8/yolov8.yaml +0 -0
  124. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v9/yolov9c-seg.yaml +0 -0
  125. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v9/yolov9c.yaml +0 -0
  126. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v9/yolov9e-seg.yaml +0 -0
  127. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v9/yolov9e.yaml +0 -0
  128. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v9/yolov9m.yaml +0 -0
  129. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v9/yolov9s.yaml +0 -0
  130. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/models/v9/yolov9t.yaml +0 -0
  131. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/trackers/botsort.yaml +0 -0
  132. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/cfg/trackers/bytetrack.yaml +0 -0
  133. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/__init__.py +0 -0
  134. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/annotator.py +0 -0
  135. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/base.py +0 -0
  136. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/converter.py +0 -0
  137. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/scripts/download_weights.sh +0 -0
  138. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/scripts/get_coco.sh +0 -0
  139. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/scripts/get_coco128.sh +0 -0
  140. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/scripts/get_imagenet.sh +0 -0
  141. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/split.py +0 -0
  142. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/split_dota.py +0 -0
  143. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/engine/__init__.py +0 -0
  144. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/engine/predictor.py +0 -0
  145. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/engine/trainer.py +0 -0
  146. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/engine/tuner.py +0 -0
  147. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/engine/validator.py +0 -0
  148. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/hub/__init__.py +0 -0
  149. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/hub/auth.py +0 -0
  150. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/hub/google/__init__.py +0 -0
  151. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/hub/session.py +0 -0
  152. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/hub/utils.py +0 -0
  153. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/__init__.py +0 -0
  154. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/fastsam/__init__.py +0 -0
  155. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/fastsam/model.py +0 -0
  156. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/fastsam/predict.py +0 -0
  157. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/fastsam/utils.py +0 -0
  158. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/fastsam/val.py +0 -0
  159. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/nas/__init__.py +0 -0
  160. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/nas/model.py +0 -0
  161. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/nas/predict.py +0 -0
  162. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/nas/val.py +0 -0
  163. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/rtdetr/__init__.py +0 -0
  164. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/rtdetr/model.py +0 -0
  165. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/rtdetr/predict.py +0 -0
  166. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/rtdetr/train.py +0 -0
  167. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/rtdetr/val.py +0 -0
  168. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/sam/__init__.py +0 -0
  169. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/sam/amg.py +0 -0
  170. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/sam/build.py +0 -0
  171. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/sam/model.py +0 -0
  172. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/sam/modules/__init__.py +0 -0
  173. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/sam/modules/blocks.py +0 -0
  174. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/sam/modules/decoders.py +0 -0
  175. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/sam/modules/encoders.py +0 -0
  176. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/sam/modules/memory_attention.py +0 -0
  177. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/sam/modules/sam.py +0 -0
  178. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/sam/modules/tiny_encoder.py +0 -0
  179. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/sam/modules/transformer.py +0 -0
  180. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/sam/modules/utils.py +0 -0
  181. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/sam/predict.py +0 -0
  182. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/utils/__init__.py +0 -0
  183. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/utils/loss.py +0 -0
  184. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/utils/ops.py +0 -0
  185. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/__init__.py +0 -0
  186. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/classify/__init__.py +0 -0
  187. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/classify/predict.py +0 -0
  188. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/classify/train.py +0 -0
  189. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/classify/val.py +0 -0
  190. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/detect/__init__.py +0 -0
  191. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/detect/train.py +0 -0
  192. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/detect/val.py +0 -0
  193. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/model.py +0 -0
  194. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/obb/__init__.py +0 -0
  195. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/obb/predict.py +0 -0
  196. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/obb/train.py +0 -0
  197. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/obb/val.py +0 -0
  198. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/pose/__init__.py +0 -0
  199. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/pose/predict.py +0 -0
  200. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/pose/train.py +0 -0
  201. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/pose/val.py +0 -0
  202. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/segment/__init__.py +0 -0
  203. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/segment/predict.py +0 -0
  204. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/segment/train.py +0 -0
  205. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/segment/val.py +0 -0
  206. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/world/__init__.py +0 -0
  207. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/world/train.py +0 -0
  208. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/world/train_world.py +0 -0
  209. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/yoloe/__init__.py +0 -0
  210. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/yoloe/predict.py +0 -0
  211. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/yoloe/train.py +0 -0
  212. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/yoloe/train_seg.py +0 -0
  213. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/yoloe/val.py +0 -0
  214. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/nn/__init__.py +0 -0
  215. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/nn/autobackend.py +0 -0
  216. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/nn/modules/__init__.py +0 -0
  217. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/nn/modules/activation.py +0 -0
  218. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/nn/modules/block.py +0 -0
  219. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/nn/modules/conv.py +0 -0
  220. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/nn/modules/head.py +0 -0
  221. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/nn/modules/transformer.py +0 -0
  222. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/nn/modules/utils.py +0 -0
  223. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/nn/text_model.py +0 -0
  224. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/__init__.py +0 -0
  225. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/analytics.py +0 -0
  226. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/config.py +0 -0
  227. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/distance_calculation.py +0 -0
  228. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/instance_segmentation.py +0 -0
  229. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/object_blurrer.py +0 -0
  230. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/object_cropper.py +0 -0
  231. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/parking_management.py +0 -0
  232. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/queue_management.py +0 -0
  233. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/region_counter.py +0 -0
  234. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/similarity_search.py +0 -0
  235. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/solutions.py +0 -0
  236. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/speed_estimation.py +0 -0
  237. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/streamlit_inference.py +0 -0
  238. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/templates/similarity-search.html +0 -0
  239. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/trackzone.py +0 -0
  240. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/vision_eye.py +0 -0
  241. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/trackers/__init__.py +0 -0
  242. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/trackers/basetrack.py +0 -0
  243. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/trackers/bot_sort.py +0 -0
  244. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/trackers/utils/__init__.py +0 -0
  245. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/trackers/utils/kalman_filter.py +0 -0
  246. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/trackers/utils/matching.py +0 -0
  247. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/autobatch.py +0 -0
  248. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/autodevice.py +0 -0
  249. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/benchmarks.py +0 -0
  250. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/callbacks/__init__.py +0 -0
  251. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/callbacks/base.py +0 -0
  252. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/callbacks/clearml.py +0 -0
  253. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/callbacks/comet.py +0 -0
  254. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/callbacks/dvc.py +0 -0
  255. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/callbacks/hub.py +0 -0
  256. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/callbacks/mlflow.py +0 -0
  257. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/callbacks/neptune.py +0 -0
  258. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/callbacks/raytune.py +0 -0
  259. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/callbacks/tensorboard.py +0 -0
  260. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/callbacks/wb.py +0 -0
  261. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/dist.py +0 -0
  262. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/downloads.py +0 -0
  263. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/errors.py +0 -0
  264. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/export.py +0 -0
  265. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/files.py +0 -0
  266. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/instance.py +0 -0
  267. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/ops.py +0 -0
  268. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/patches.py +0 -0
  269. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/plotting.py +0 -0
  270. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/tal.py +0 -0
  271. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/torch_utils.py +0 -0
  272. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/triton.py +0 -0
  273. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/utils/tuner.py +0 -0
  274. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics.egg-info/SOURCES.txt +0 -0
  275. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics.egg-info/dependency_links.txt +0 -0
  276. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics.egg-info/entry_points.txt +0 -0
  277. {ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics.egg-info/top_level.txt +0 -0

{ultralytics-8.3.132/ultralytics.egg-info → ultralytics-8.3.134}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ultralytics
-Version: 8.3.132
+Version: 8.3.134
 Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -59,7 +59,7 @@ Requires-Dist: mkdocstrings[python]; extra == "dev"
 Requires-Dist: mkdocs-ultralytics-plugin>=0.1.17; extra == "dev"
 Requires-Dist: mkdocs-macros-plugin>=1.0.5; extra == "dev"
 Provides-Extra: export
-Requires-Dist: onnx>=1.12.0; extra == "export"
+Requires-Dist: onnx<1.18.0,>=1.12.0; extra == "export"
 Requires-Dist: coremltools>=8.0; (platform_system != "Windows" and python_version <= "3.13") and extra == "export"
 Requires-Dist: scikit-learn>=1.3.2; (platform_system != "Windows" and python_version <= "3.13") and extra == "export"
 Requires-Dist: openvino>=2024.0.0; extra == "export"

{ultralytics-8.3.132 → ultralytics-8.3.134}/pyproject.toml
@@ -93,7 +93,7 @@ dev = [
     "mkdocs-macros-plugin>=1.0.5" # duplicating content (i.e. export tables) in multiple places
 ]
 export = [
-    "onnx>=1.12.0", # ONNX export
+    "onnx>=1.12.0,<1.18.0", # ONNX export
     "coremltools>=8.0; platform_system != 'Windows' and python_version <= '3.13'", # CoreML supported on macOS and Linux
     "scikit-learn>=1.3.2; platform_system != 'Windows' and python_version <= '3.13'", # CoreML k-means quantization
     "openvino>=2024.0.0", # OpenVINO export

{ultralytics-8.3.132 → ultralytics-8.3.134}/tests/test_cli.py
@@ -61,6 +61,7 @@ def test_rtdetr(task: str = "detect", model: str = "yolov8n-rtdetr.yaml", data:
     if TORCH_1_9:
         weights = WEIGHTS_DIR / "rtdetr-l.pt"
         run(f"yolo predict {task} model={weights} source={ASSETS / 'bus.jpg'} imgsz=160 save save_crop save_txt")
+        run(f"yolo train {task} model={weights} epochs=1 imgsz=160 cache=disk data=coco8.yaml")


 @pytest.mark.skipif(checks.IS_PYTHON_3_12, reason="MobileSAM with CLIP is not supported in Python 3.12")
@@ -126,3 +127,12 @@ def test_train_gpu(task: str, model: str, data: str) -> None:
     """Test YOLO training on GPU(s) for various tasks and models."""
     run(f"yolo train {task} model={model} data={data} imgsz=32 epochs=1 device=0")  # single GPU
     run(f"yolo train {task} model={model} data={data} imgsz=32 epochs=1 device=0,1")  # multi GPU
+
+
+@pytest.mark.parametrize(
+    "solution",
+    ["count", "blur", "workout", "heatmap", "isegment", "visioneye", "speed", "queue", "analytics", "trackzone"],
+)
+def test_solutions(solution: str) -> None:
+    """Test yolo solutions command-line modes."""
+    run(f"yolo solutions {solution} verbose=False")
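
The new test_solutions test simply invokes the `yolo solutions` CLI once per mode. The `run` helper used by these tests is, in essence, a thin subprocess wrapper around the `yolo` command; a self-contained sketch of the same pattern (this `run` is an assumed stand-in, not necessarily the repo's exact helper):

    import subprocess

    import pytest


    def run(cmd: str) -> None:
        """Execute a CLI command and fail if it exits with a non-zero status."""
        subprocess.run(cmd.split(), check=True)


    @pytest.mark.parametrize("solution", ["count", "heatmap", "speed"])
    def test_solutions_smoke(solution: str) -> None:
        """Smoke-test a few `yolo solutions` modes end to end."""
        run(f"yolo solutions {solution} verbose=False")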

{ultralytics-8.3.132 → ultralytics-8.3.134}/tests/test_cuda.py
@@ -9,7 +9,7 @@ import torch
 from tests import CUDA_DEVICE_COUNT, CUDA_IS_AVAILABLE, MODEL, SOURCE
 from ultralytics import YOLO
 from ultralytics.cfg import TASK2DATA, TASK2MODEL, TASKS
-from ultralytics.utils import ASSETS, WEIGHTS_DIR
+from ultralytics.utils import ASSETS, IS_JETSON, WEIGHTS_DIR
 from ultralytics.utils.autodevice import GPUInfo
 from ultralytics.utils.checks import check_amp
 from ultralytics.utils.torch_utils import TORCH_1_13
@@ -17,11 +17,14 @@ from ultralytics.utils.torch_utils import TORCH_1_13
 # Try to find idle devices if CUDA is available
 DEVICES = []
 if CUDA_IS_AVAILABLE:
-    gpu_info = GPUInfo()
-    gpu_info.print_status()
-    idle_gpus = gpu_info.select_idle_gpu(count=2, min_memory_mb=2048)
-    if idle_gpus:
-        DEVICES = idle_gpus
+    if IS_JETSON:
+        DEVICES = [0]  # NVIDIA Jetson only has one GPU and does not fully support pynvml library
+    else:
+        gpu_info = GPUInfo()
+        gpu_info.print_status()
+        idle_gpus = gpu_info.select_idle_gpu(count=2, min_memory_mb=2048)
+        if idle_gpus:
+            DEVICES = idle_gpus


 def test_checks():
@@ -38,6 +41,7 @@ def test_amp():


 @pytest.mark.slow
+# @pytest.mark.skipif(IS_JETSON, reason="Temporary disable ONNX for Jetson")
 @pytest.mark.skipif(not DEVICES, reason="No CUDA devices available")
 @pytest.mark.parametrize(
     "task, dynamic, int8, half, batch, simplify, nms",
@@ -49,7 +53,7 @@ def test_amp():
         if not (
             (int8 and half)
             or (task == "classify" and nms)
-            or (task == "obb" and nms and not TORCH_1_13)
+            or (task == "obb" and nms and (not TORCH_1_13 or IS_JETSON))  # obb nms fails on NVIDIA Jetson
             or (simplify and dynamic)  # onnxslim is slow when dynamic=True
         )
     ],
@@ -110,9 +114,11 @@ def test_train():

     device = tuple(DEVICES) if len(DEVICES) > 1 else DEVICES[0]
     results = YOLO(MODEL).train(data="coco8.yaml", imgsz=64, epochs=1, device=device)  # requires imgsz>=64
-    visible = eval(os.environ["CUDA_VISIBLE_DEVICES"])
-    assert visible == device, f"Passed GPUs '{device}', but used GPUs '{visible}'"
-    assert results is (None if len(DEVICES) > 1 else not None)  # DDP returns None, single-GPU returns metrics
+    # NVIDIA Jetson only has one GPU and therefore skipping checks
+    if not IS_JETSON:
+        visible = eval(os.environ["CUDA_VISIBLE_DEVICES"])
+        assert visible == device, f"Passed GPUs '{device}', but used GPUs '{visible}'"
+        assert results is (None if len(DEVICES) > 1 else not None)  # DDP returns None, single-GPU returns metrics


 @pytest.mark.slow

{ultralytics-8.3.132 → ultralytics-8.3.134}/tests/test_python.py
@@ -271,10 +271,12 @@ def test_results(model):
         r = r.to(device="cpu", dtype=torch.float32)
         r.save_txt(txt_file=TMP / "runs/tests/label.txt", save_conf=True)
         r.save_crop(save_dir=TMP / "runs/tests/crops/")
-        r.to_json(normalize=True)
-        r.to_df(decimals=3)
+        r.to_df(decimals=3)  # Align to_ methods: https://docs.ultralytics.com/modes/predict/#working-with-results
         r.to_csv()
         r.to_xml()
+        r.to_html()
+        r.to_json(normalize=True)
+        r.to_sql()
         r.plot(pil=True, save=True, filename=TMP / "results_plot_save.jpg")
         r.plot(conf=True, boxes=True)
         print(r, len(r), r.path)  # print after methods
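
The test now exercises the full family of Results export helpers. A usage sketch against the public API (model name and image URL are illustrative; see the linked docs page above for the authoritative signatures):

    from ultralytics import YOLO

    results = YOLO("yolo11n.pt")("https://ultralytics.com/images/bus.jpg")
    for r in results:
        table = r.to_df(decimals=3)            # tabular view of the detections
        csv_text = r.to_csv()                  # CSV serialization
        xml_text = r.to_xml()                  # XML serialization
        html_text = r.to_html()                # HTML table
        json_text = r.to_json(normalize=True)  # JSON with normalized coordinates
        r.to_sql()                             # writes the detections to a local SQLite database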

{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/__init__.py
@@ -1,6 +1,6 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

-__version__ = "8.3.132"
+__version__ = "8.3.134"

 import os


{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/augment.py
@@ -1170,6 +1170,8 @@ class RandomPerspective:
                 img = cv2.warpPerspective(img, M, dsize=self.size, borderValue=(114, 114, 114))
             else:  # affine
                 img = cv2.warpAffine(img, M[:2], dsize=self.size, borderValue=(114, 114, 114))
+            if img.ndim == 2:
+                img = img[..., None]
         return img, M, s

     def apply_bboxes(self, bboxes, M):
@@ -1824,6 +1826,8 @@ class CopyPaste(BaseMixTransform):
             cv2.drawContours(im_new, instances2.segments[[j]].astype(np.int32), -1, (1, 1, 1), cv2.FILLED)

         result = labels2.get("img", cv2.flip(im, 1))  # augment segments
+        if result.ndim == 2:  # cv2.flip would eliminate the last dimension for grayscale images
+            result = result[..., None]
         i = im_new.astype(bool)
         im[i] = result[i]


{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/build.py
@@ -244,9 +244,9 @@ def load_inference_source(source=None, batch=1, vid_stride=1, buffer=False, chan
     elif in_memory:
         dataset = source
     elif stream:
-        dataset = LoadStreams(source, vid_stride=vid_stride, buffer=buffer)
+        dataset = LoadStreams(source, vid_stride=vid_stride, buffer=buffer, channels=channels)
     elif screenshot:
-        dataset = LoadScreenshots(source)
+        dataset = LoadScreenshots(source, channels=channels)
     elif from_img:
         dataset = LoadPilAndNumpy(source, channels=channels)
     else:

{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/dataset.py
@@ -184,7 +184,9 @@ class YOLODataset(BaseDataset):
         [cache.pop(k) for k in ("hash", "version", "msgs")]  # remove items
         labels = cache["labels"]
         if not labels:
-            LOGGER.warning(f"No images found in {cache_path}, training may not work correctly. {HELP_URL}")
+            raise RuntimeError(
+                f"No valid images found in {cache_path}. Images with incorrectly formatted labels are ignored. {HELP_URL}"
+            )
         self.im_files = [lb["im_file"] for lb in labels]  # update im_files

         # Check if the dataset is all boxes or all segments
@@ -199,7 +201,7 @@ class YOLODataset(BaseDataset):
             for lb in labels:
                 lb["segments"] = []
         if len_cls == 0:
-            LOGGER.warning(f"No labels found in {cache_path}, training may not work correctly. {HELP_URL}")
+            LOGGER.warning(f"Labels are missing or empty in {cache_path}, training may not work correctly. {HELP_URL}")
         return labels

     def build_transforms(self, hyp=None):

{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/loaders.py
@@ -68,6 +68,7 @@ class LoadStreams:
        shape (List[Tuple[int, int, int]]): List of shapes for each stream.
        caps (List[cv2.VideoCapture]): List of cv2.VideoCapture objects for each stream.
        bs (int): Batch size for processing.
+       cv2_flag (int): OpenCV flag for image reading (grayscale or RGB).

    Methods:
        update: Read stream frames in daemon thread.
@@ -89,13 +90,14 @@
        - The class implements a buffer system to manage frame storage and retrieval.
     """

-    def __init__(self, sources="file.streams", vid_stride=1, buffer=False):
+    def __init__(self, sources="file.streams", vid_stride=1, buffer=False, channels=3):
         """Initialize stream loader for multiple video sources, supporting various stream types."""
         torch.backends.cudnn.benchmark = True  # faster for fixed-size inference
         self.buffer = buffer  # buffer input streams
         self.running = True  # running flag for Thread
         self.mode = "stream"
         self.vid_stride = vid_stride  # video frame-rate stride
+        self.cv2_flag = cv2.IMREAD_GRAYSCALE if channels == 1 else cv2.IMREAD_COLOR  # grayscale or RGB

         sources = Path(sources).read_text().rsplit() if os.path.isfile(sources) else [sources]
         n = len(sources)
@@ -131,6 +133,7 @@ class LoadStreams:
             self.fps[i] = max((fps if math.isfinite(fps) else 0) % 100, 0) or 30  # 30 FPS fallback

             success, im = self.caps[i].read()  # guarantee first frame
+            im = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)[..., None] if self.cv2_flag == cv2.IMREAD_GRAYSCALE else im
             if not success or im is None:
                 raise ConnectionError(f"{st}Failed to read images from {s}")
             self.imgs[i].append(im)
@@ -149,6 +152,9 @@ class LoadStreams:
                 cap.grab()  # .read() = .grab() followed by .retrieve()
                 if n % self.vid_stride == 0:
                     success, im = cap.retrieve()
+                    im = (
+                        cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)[..., None] if self.cv2_flag == cv2.IMREAD_GRAYSCALE else im
+                    )
                     if not success:
                         im = np.zeros(self.shape[i], dtype=np.uint8)
                         LOGGER.warning("Video stream unresponsive, please check your IP camera connection.")
@@ -230,6 +236,7 @@ class LoadScreenshots:
        bs (int): Batch size, set to 1.
        fps (int): Frames per second, set to 30.
        monitor (Dict[str, int]): Monitor configuration details.
+       cv2_flag (int): OpenCV flag for image reading (grayscale or RGB).

    Methods:
        __iter__: Returns an iterator object.
@@ -241,7 +248,7 @@ class LoadScreenshots:
        ... print(f"Captured frame: {im.shape}")
     """

-    def __init__(self, source):
+    def __init__(self, source, channels=3):
         """Initialize screenshot capture with specified screen and region parameters."""
         check_requirements("mss")
         import mss  # noqa
@@ -259,6 +266,7 @@ class LoadScreenshots:
         self.sct = mss.mss()
         self.bs = 1
         self.fps = 30
+        self.cv2_flag = cv2.IMREAD_GRAYSCALE if channels == 1 else cv2.IMREAD_COLOR  # grayscale or RGB

         # Parse monitor shape
         monitor = self.sct.monitors[self.screen]
@@ -275,6 +283,7 @@ class LoadScreenshots:
     def __next__(self):
         """Captures and returns the next screenshot as a numpy array using the mss library."""
         im0 = np.asarray(self.sct.grab(self.monitor))[:, :, :3]  # BGRA to BGR
+        im0 = cv2.cvtColor(im0, cv2.COLOR_BGR2GRAY)[..., None] if self.cv2_flag == cv2.IMREAD_GRAYSCALE else im0
         s = f"screen {self.screen} (LTWH): {self.left},{self.top},{self.width},{self.height}: "

         self.frame += 1
@@ -395,6 +404,11 @@ class LoadImagesAndVideos:

             if success:
                 success, im0 = self.cap.retrieve()
+                im0 = (
+                    cv2.cvtColor(im0, cv2.COLOR_BGR2GRAY)[..., None]
+                    if self.cv2_flag == cv2.IMREAD_GRAYSCALE
+                    else im0
+                )
                 if success:
                     self.frame += 1
                     paths.append(path)
@@ -497,6 +511,8 @@ class LoadPilAndNumpy:
             # adding new axis if it's grayscale, and converting to BGR if it's RGB
             im = im[..., None] if flag == "L" else im[..., ::-1]
             im = np.ascontiguousarray(im)  # contiguous
+        elif im.ndim == 2:  # grayscale in numpy form
+            im = im[..., None]
         return im

     def __len__(self):
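
The recurring `cv2.cvtColor(...)[..., None]` pattern in these loader changes exists because OpenCV drops the channel axis when converting to grayscale, while downstream preprocessing expects HWC arrays. A minimal illustration with a dummy frame (not tied to any particular loader):

    import cv2
    import numpy as np

    frame = np.zeros((480, 640, 3), dtype=np.uint8)  # dummy BGR frame
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)   # shape becomes (480, 640): channel axis is gone
    gray = gray[..., None]                           # restore it: (480, 640, 1), matching channels=1 models
    print(gray.shape)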

{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/data/utils.py
@@ -424,8 +424,8 @@ def check_det_dataset(dataset, autodownload=True):

     # Resolve paths
     path = Path(extract_dir or data.get("path") or Path(data.get("yaml_file", "")).parent)  # dataset root
-    if not path.is_absolute():
-        path = (DATASETS_DIR / path).resolve()
+    if not path.exists() and not path.is_absolute():
+        path = (DATASETS_DIR / path).resolve()  # path relative to DATASETS_DIR

     # Set paths
     data["path"] = path  # download scripts

{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/engine/exporter.py
@@ -89,6 +89,7 @@ from ultralytics.utils import (
     MACOS_VERSION,
     RKNN_CHIPS,
     ROOT,
+    SETTINGS,
     WINDOWS,
     YAML,
     callbacks,
@@ -106,7 +107,7 @@ from ultralytics.utils.downloads import attempt_download_asset, get_github_asset
 from ultralytics.utils.export import export_engine, export_onnx
 from ultralytics.utils.files import file_size, spaces_in_path
 from ultralytics.utils.ops import Profile, nms_rotated
-from ultralytics.utils.torch_utils import TORCH_1_13, get_latest_opset, select_device
+from ultralytics.utils.torch_utils import TORCH_1_13, get_cpu_info, get_latest_opset, select_device


 def export_formats():
@@ -141,7 +142,7 @@ def export_formats():
         ["MNN", "mnn", ".mnn", True, True, ["batch", "half", "int8"]],
         ["NCNN", "ncnn", "_ncnn_model", True, True, ["batch", "half"]],
         ["IMX", "imx", "_imx_model", True, True, ["int8", "fraction"]],
-        ["RKNN", "rknn", "_rknn_model", False, False, ["batch", "name", "int8"]],
+        ["RKNN", "rknn", "_rknn_model", False, False, ["batch", "name"]],
     ]
     return dict(zip(["Format", "Argument", "Suffix", "CPU", "GPU", "Arguments"], zip(*x)))

@@ -344,7 +345,6 @@ class Exporter:
                 "See https://docs.ultralytics.com/models/yolo-world for details."
             )
             model.clip_model = None  # openvino int8 export error: https://github.com/ultralytics/ultralytics/pull/18445
-
         if self.args.int8 and not self.args.data:
             self.args.data = DEFAULT_CFG.data or TASK2DATA[getattr(model, "task", "detect")]  # assign default data
             LOGGER.warning(
@@ -352,6 +352,14 @@ class Exporter:
             )
         if tfjs and (ARM64 and LINUX):
             raise SystemError("TF.js exports are not currently supported on ARM64 Linux")
+        # Recommend OpenVINO if export and Intel CPU
+        if SETTINGS.get("openvino_msg"):
+            if "intel" in get_cpu_info().lower():
+                LOGGER.info(
+                    "💡 ProTip: Export to OpenVINO format for best performance on Intel CPUs."
+                    " Learn more at https://docs.ultralytics.com/integrations/openvino/"
+                )
+            SETTINGS["openvino_msg"] = False

         # Input
         im = torch.zeros(self.args.batch, model.yaml.get("channels", 3), *self.imgsz).to(self.device)
@@ -547,7 +555,7 @@ class Exporter:
     @try_export
     def export_onnx(self, prefix=colorstr("ONNX:")):
         """YOLO ONNX export."""
-        requirements = ["onnx>=1.12.0"]
+        requirements = ["onnx>=1.12.0,<1.18.0"]
         if self.args.simplify:
             requirements += ["onnxslim>=0.1.46", "onnxruntime" + ("-gpu" if torch.cuda.is_available() else "")]
         check_requirements(requirements)
@@ -1113,8 +1121,8 @@ class Exporter:
         rknn = RKNN(verbose=False)
         rknn.config(mean_values=[[0, 0, 0]], std_values=[[255, 255, 255]], target_platform=self.args.name)
         rknn.load_onnx(model=f)
-        rknn.build(do_quantization=self.args.int8)
-        f = f.replace(".onnx", f"-{self.args.name}-int8.rknn" if self.args.int8 else f"-{self.args.name}-fp16.rknn")
+        rknn.build(do_quantization=False)  # TODO: Add quantization support
+        f = f.replace(".onnx", f"-{self.args.name}.rknn")
         rknn.export_rknn(f"{export_path / f}")
         YAML.save(export_path / "metadata.yaml", self.metadata)
         return export_path, None
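
The new ProTip points users toward OpenVINO on Intel hardware. A typical export round-trip with the public API looks roughly like this (model name and image URL are illustrative; the output directory name shown is the usual default produced by export):

    from ultralytics import YOLO

    model = YOLO("yolo11n.pt")
    ov_dir = model.export(format="openvino")  # e.g. creates "yolo11n_openvino_model/"
    ov_model = YOLO(ov_dir)                   # exported models can be loaded back for inference
    results = ov_model("https://ultralytics.com/images/bus.jpg")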

{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/engine/model.py
@@ -529,7 +529,7 @@ class Model(torch.nn.Module):
            - For SAM-type models, 'prompts' can be passed as a keyword argument.
         """
         if source is None:
-            source = ASSETS
+            source = "https://ultralytics.com/images/boats.jpg" if self.task == "obb" else ASSETS
             LOGGER.warning(f"'source' is missing. Using 'source={source}'.")

         is_cli = (ARGV[0].endswith("yolo") or ARGV[0].endswith("ultralytics")) and any(

{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/engine/results.py
@@ -1032,7 +1032,7 @@ class Results(SimpleClass):
         conn.commit()
         conn.close()

-        LOGGER.info(f"Detection results successfully written to SQL table '{table_name}' in database '{db_path}'.")
+        LOGGER.info(f"Detection results successfully written to SQL table '{table_name}' in database '{db_path}'.")


 class Boxes(BaseTensor):

{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/models/yolo/detect/predict.py
@@ -51,7 +51,7 @@ class DetectionPredictor(BasePredictor):
            >>> results = predictor.predict("path/to/image.jpg")
            >>> processed_results = predictor.postprocess(preds, img, orig_imgs)
         """
-        save_feats = getattr(self, "save_feats", False)
+        save_feats = getattr(self, "_feats", None) is not None
         preds = ops.non_max_suppression(
             preds,
             self.args.conf,

{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/nn/tasks.py
@@ -284,13 +284,15 @@ class BaseModel(torch.nn.Module):
         updated_csd = intersect_dicts(csd, self.state_dict())  # intersect
         self.load_state_dict(updated_csd, strict=False)  # load
         len_updated_csd = len(updated_csd)
-        first_conv = "model.0.conv.weight"
-        if first_conv not in updated_csd:  # mostly used to boost multi-channel training
-            c1, c2, h, w = self.state_dict()[first_conv].shape
+        first_conv = "model.0.conv.weight"  # hard-coded to yolo models for now
+        # mostly used to boost multi-channel training
+        state_dict = self.state_dict()
+        if first_conv not in updated_csd and first_conv in state_dict:
+            c1, c2, h, w = state_dict[first_conv].shape
             cc1, cc2, ch, cw = csd[first_conv].shape
             if ch == h and cw == w:
                 c1, c2 = min(c1, cc1), min(c2, cc2)
-                self.state_dict()[first_conv][:c1, :c2] = csd[first_conv][:c1, :c2]
+                state_dict[first_conv][:c1, :c2] = csd[first_conv][:c1, :c2]
                 len_updated_csd += 1
         if verbose:
             LOGGER.info(f"Transferred {len_updated_csd}/{len(self.model.state_dict())} items from pretrained weights")
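
The nn/tasks.py change above copies whatever slice of the pretrained first-conv weight fits the new model, which is what lets e.g. multi-channel models reuse 3-channel checkpoints. A standalone sketch of the same slicing idea (shapes are hypothetical):

    import torch

    new_w = torch.zeros(16, 4, 3, 3)   # first conv of a 4-channel model: (out, in, kH, kW)
    ckpt_w = torch.randn(16, 3, 3, 3)  # same layer from a 3-channel pretrained checkpoint

    if new_w.shape[2:] == ckpt_w.shape[2:]:        # kernel sizes must match (the ch == h and cw == w check)
        c1 = min(new_w.shape[0], ckpt_w.shape[0])  # overlapping output channels
        c2 = min(new_w.shape[1], ckpt_w.shape[1])  # overlapping input channels
        new_w[:c1, :c2] = ckpt_w[:c1, :c2]         # copy only the slice that exists in both tensors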

{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/ai_gym.py
@@ -1,5 +1,7 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

+from collections import defaultdict
+
 from ultralytics.solutions.solutions import BaseSolution, SolutionAnnotator, SolutionResults


@@ -11,10 +13,7 @@ class AIGym(BaseSolution):
     repetitions of exercises based on predefined angle thresholds for up and down positions.

     Attributes:
-        count (List[int]): Repetition counts for each detected person.
-        angle (List[float]): Current angle of the tracked body part for each person.
-        stage (List[str]): Current exercise stage ('up', 'down', or '-') for each person.
-        initial_stage (str | None): Initial stage of the exercise.
+        states (Dict[float, int, str]): Stores per-track angle, count, and stage for workout monitoring.
         up_angle (float): Angle threshold for considering the 'up' position of an exercise.
         down_angle (float): Angle threshold for considering the 'down' position of an exercise.
         kpts (List[int]): Indices of keypoints used for angle calculation.
@@ -41,12 +40,9 @@
         """
         kwargs["model"] = kwargs.get("model", "yolo11n-pose.pt")
         super().__init__(**kwargs)
-        self.count = []  # List for counts, necessary where there are multiple objects in frame
-        self.angle = []  # List for angle, necessary where there are multiple objects in frame
-        self.stage = []  # List for stage, necessary where there are multiple objects in frame
+        self.states = defaultdict(lambda: {"angle": 0, "count": 0, "stage": "-"})  # Dict for count, angle and stage

         # Extract details from CFG single time for usage later
-        self.initial_stage = None
         self.up_angle = float(self.CFG["up_angle"])  # Pose up predefined angle to consider up pose
         self.down_angle = float(self.CFG["down_angle"])  # Pose down predefined angle to consider down pose
         self.kpts = self.CFG["kpts"]  # User selected kpts of workouts storage for further usage
@@ -81,33 +77,30 @@
         tracks = self.tracks[0]

         if tracks.boxes.id is not None:
-            if len(tracks) > len(self.count):  # Add new entries for newly detected people
-                new_human = len(tracks) - len(self.count)
-                self.angle += [0] * new_human
-                self.count += [0] * new_human
-                self.stage += ["-"] * new_human
-
-            # Enumerate over keypoints
-            for ind, k in enumerate(reversed(tracks.keypoints.data)):
+            track_ids = tracks.boxes.id.cpu().tolist()
+            kpt_data = tracks.keypoints.data.cpu()  # Avoid repeated .cpu() calls
+
+            for i, k in enumerate(kpt_data):
+                track_id = int(track_ids[i])  # get track id
+                state = self.states[track_id]  # get state details
                 # Get keypoints and estimate the angle
-                kpts = [k[int(self.kpts[i])].cpu() for i in range(3)]
-                self.angle[ind] = annotator.estimate_pose_angle(*kpts)
+                state["angle"] = annotator.estimate_pose_angle(*[k[int(idx)] for idx in self.kpts])
                 annotator.draw_specific_kpts(k, self.kpts, radius=self.line_width * 3)

                 # Determine stage and count logic based on angle thresholds
-                if self.angle[ind] < self.down_angle:
-                    if self.stage[ind] == "up":
-                        self.count[ind] += 1
-                    self.stage[ind] = "down"
-                elif self.angle[ind] > self.up_angle:
-                    self.stage[ind] = "up"
+                if state["angle"] < self.down_angle:
+                    if state["stage"] == "up":
+                        state["count"] += 1
+                    state["stage"] = "down"
+                elif state["angle"] > self.up_angle:
+                    state["stage"] = "up"

                 # Display angle, count, and stage text
                 if self.show_labels:
                     annotator.plot_angle_and_count_and_stage(
-                        angle_text=self.angle[ind],  # angle text for display
-                        count_text=self.count[ind],  # count text for workouts
-                        stage_text=self.stage[ind],  # stage position text
+                        angle_text=state["angle"],  # angle text for display
+                        count_text=state["count"],  # count text for workouts
+                        stage_text=state["stage"],  # stage position text
                         center_kpt=k[int(self.kpts[1])],  # center keypoint for display
                     )
         plot_im = annotator.result()
@@ -116,8 +109,8 @@
         # Return SolutionResults
         return SolutionResults(
             plot_im=plot_im,
-            workout_count=self.count,
-            workout_stage=self.stage,
-            workout_angle=self.angle,
+            workout_count=[v["count"] for v in self.states.values()],
+            workout_stage=[v["stage"] for v in self.states.values()],
+            workout_angle=[v["angle"] for v in self.states.values()],
             total_tracks=len(self.track_ids),
         )
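
The refactor above replaces three parallel lists indexed by detection order with a single per-track-ID mapping. The core trick is `collections.defaultdict`, which creates a fresh state on first access, so no resizing logic is needed when a new person enters the frame. A minimal illustration (track IDs are made up):

    from collections import defaultdict

    states = defaultdict(lambda: {"angle": 0, "count": 0, "stage": "-"})

    states[7]["stage"] = "up"  # track 7 is created lazily with the default state
    states[7]["count"] += 1
    print(states[3])           # a never-seen track also starts from {"angle": 0, "count": 0, "stage": "-"}
    print(dict(states))        # plain-dict copy, handy when returning results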

{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/heatmap.py
@@ -99,7 +99,6 @@ class Heatmap(ObjectCounter):
             if self.region is not None:
                 self.annotator.draw_region(reg_pts=self.region, color=(104, 0, 123), thickness=self.line_width * 2)
                 self.store_tracking_history(track_id, box)  # Store track history
-                self.store_classwise_counts(cls)  # Store classwise counts in dict
                 # Get previous position if available
                 prev_position = None
                 if len(self.track_history[track_id]) > 1:
@@ -123,6 +122,6 @@
             plot_im=plot_im,
             in_count=self.in_count,
             out_count=self.out_count,
-            classwise_count=self.classwise_counts,
+            classwise_count=dict(self.classwise_counts),
             total_tracks=len(self.track_ids),
         )

{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/object_counter.py
@@ -1,5 +1,7 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

+from collections import defaultdict
+
 from ultralytics.solutions.solutions import BaseSolution, SolutionAnnotator, SolutionResults
 from ultralytics.utils.plotting import colors

@@ -22,7 +24,6 @@ class ObjectCounter(BaseSolution):

     Methods:
         count_objects: Counts objects within a polygonal or linear region.
-        store_classwise_counts: Initializes class-wise counts if not already present.
         display_counts: Displays object counts on the frame.
         process: Processes input data (frames or object tracks) and updates counts.

@@ -40,7 +41,7 @@
         self.in_count = 0  # Counter for objects moving inward
         self.out_count = 0  # Counter for objects moving outward
         self.counted_ids = []  # List of IDs of objects that have been counted
-        self.classwise_counts = {}  # Dictionary for counts, categorized by object class
+        self.classwise_counts = defaultdict(lambda: {"IN": 0, "OUT": 0})  # Dictionary for counts, categorized by class
         self.region_initialized = False  # Flag indicating whether the region has been initialized

         self.show_in = self.CFG["show_in"]
@@ -110,22 +111,6 @@
                     self.classwise_counts[self.names[cls]]["OUT"] += 1
                 self.counted_ids.append(track_id)

-    def store_classwise_counts(self, cls):
-        """
-        Initialize class-wise counts for a specific object class if not already present.
-
-        Args:
-            cls (int): Class index for classwise count updates.
-
-        Examples:
-            >>> counter = ObjectCounter()
-            >>> counter.store_classwise_counts(0)  # Initialize counts for class index 0
-            >>> print(counter.classwise_counts)
-            {'person': {'IN': 0, 'OUT': 0}}
-        """
-        if self.names[cls] not in self.classwise_counts:
-            self.classwise_counts[self.names[cls]] = {"IN": 0, "OUT": 0}
-
     def display_counts(self, plot_im):
         """
         Display object counts on the input image or frame.
@@ -189,7 +174,6 @@
                 box, label=self.adjust_box_label(cls, conf, track_id), color=colors(cls, True), rotated=is_obb
             )
             self.store_tracking_history(track_id, box, is_obb=is_obb)  # Store track history
-            self.store_classwise_counts(cls)  # Store classwise counts in dict

             # Store previous position of track for object counting
             prev_position = None
@@ -206,6 +190,6 @@
             plot_im=plot_im,
             in_count=self.in_count,
             out_count=self.out_count,
-            classwise_count=self.classwise_counts,
+            classwise_count=dict(self.classwise_counts),
             total_tracks=len(self.track_ids),
         )

{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/solutions/security_alarm.py
@@ -110,7 +110,7 @@ class SecurityAlarm(BaseSolution):
         # Send the email
         try:
             self.server.send_message(message)
-            LOGGER.info("Email sent successfully!")
+            LOGGER.info("Email sent successfully!")
         except Exception as e:
             LOGGER.error(f"Failed to send email: {e}")


{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/trackers/byte_tracker.py
@@ -330,7 +330,11 @@ class BYTETracker:
         # Predict the current location with KF
         self.multi_predict(strack_pool)
         if hasattr(self, "gmc") and img is not None:
-            warp = self.gmc.apply(img, dets)
+            # use try-except here to bypass errors from gmc module
+            try:
+                warp = self.gmc.apply(img, dets)
+            except Exception:
+                warp = np.eye(2, 3)
             STrack.multi_gmc(strack_pool, warp)
             STrack.multi_gmc(unconfirmed, warp)

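
The fallback `np.eye(2, 3)` is the 2x3 identity affine matrix, so a GMC failure simply degrades to "no camera motion" instead of crashing the tracker. A quick check of that claim:

    import numpy as np

    warp = np.eye(2, 3)                   # [[1, 0, 0], [0, 1, 0]]
    point = np.array([150.0, 80.0, 1.0])  # a point in homogeneous image coordinates
    print(warp @ point)                   # -> [150.  80.], i.e. the point is left unchanged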

{ultralytics-8.3.132 → ultralytics-8.3.134}/ultralytics/trackers/track.py
@@ -45,7 +45,8 @@ def on_predict_start(predictor: object, persist: bool = False) -> None:
        raise AssertionError(f"Only 'bytetrack' and 'botsort' are supported for now, but got '{cfg.tracker_type}'")

    predictor._feats = None  # reset in case used earlier
-    predictor.save_feats = False
+    if hasattr(predictor, "_hook"):
+        predictor._hook.remove()
    if cfg.tracker_type == "botsort" and cfg.with_reid and cfg.model == "auto":
        from ultralytics.nn.modules.head import Detect

@@ -56,13 +57,11 @@
        ):
            cfg.model = "yolo11n-cls.pt"
        else:
-            predictor.save_feats = True
-
            # Register hook to extract input of Detect layer
            def pre_hook(module, input):
-                predictor._feats = [t.clone() for t in input[0]]
+                predictor._feats = list(input[0])  # unroll to new list to avoid mutation in forward

-            predictor.model.model.model[-1].register_forward_pre_hook(pre_hook)
+            predictor._hook = predictor.model.model.model[-1].register_forward_pre_hook(pre_hook)

    trackers = []
    for _ in range(predictor.dataset.bs):
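
For context on the hook bookkeeping above: `register_forward_pre_hook` returns a handle whose `remove()` detaches the hook, which is what storing it on `predictor._hook` enables when the tracker is re-initialized. A standalone PyTorch sketch of the same mechanism (toy module, not the Detect head):

    import torch


    def pre_hook(module, args):
        # args is the tuple of positional arguments about to be passed to forward()
        print(f"{module.__class__.__name__} about to run with {len(args)} positional arg(s)")


    layer = torch.nn.Linear(4, 2)
    handle = layer.register_forward_pre_hook(pre_hook)  # returns a RemovableHandle
    layer(torch.randn(1, 4))  # hook fires before forward()
    handle.remove()           # detach the hook
    layer(torch.randn(1, 4))  # runs silently: the hook is gone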