ultralytics-8.2.25.tar.gz → ultralytics-8.2.27.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ultralytics might be problematic. Click here for more details.

Files changed (223)
  1. {ultralytics-8.2.25 → ultralytics-8.2.27}/PKG-INFO +1 -1
  2. {ultralytics-8.2.25 → ultralytics-8.2.27}/pyproject.toml +1 -4
  3. {ultralytics-8.2.25 → ultralytics-8.2.27}/tests/test_cli.py +17 -15
  4. {ultralytics-8.2.25 → ultralytics-8.2.27}/tests/test_cuda.py +1 -0
  5. {ultralytics-8.2.25 → ultralytics-8.2.27}/tests/test_exports.py +5 -2
  6. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/__init__.py +1 -1
  7. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/default.yaml +1 -1
  8. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/engine/exporter.py +15 -15
  9. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/fastsam/prompt.py +2 -6
  10. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/detect/val.py +9 -5
  11. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/pose/val.py +2 -1
  12. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/segment/val.py +2 -1
  13. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/nn/modules/__init__.py +1 -1
  14. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/nn/tasks.py +2 -2
  15. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/solutions/analytics.py +51 -2
  16. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/benchmarks.py +3 -1
  17. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/checks.py +7 -1
  18. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/ops.py +21 -22
  19. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics.egg-info/PKG-INFO +1 -1
  20. {ultralytics-8.2.25 → ultralytics-8.2.27}/LICENSE +0 -0
  21. {ultralytics-8.2.25 → ultralytics-8.2.27}/README.md +0 -0
  22. {ultralytics-8.2.25 → ultralytics-8.2.27}/setup.cfg +0 -0
  23. {ultralytics-8.2.25 → ultralytics-8.2.27}/tests/__init__.py +0 -0
  24. {ultralytics-8.2.25 → ultralytics-8.2.27}/tests/conftest.py +0 -0
  25. {ultralytics-8.2.25 → ultralytics-8.2.27}/tests/test_engine.py +0 -0
  26. {ultralytics-8.2.25 → ultralytics-8.2.27}/tests/test_explorer.py +0 -0
  27. {ultralytics-8.2.25 → ultralytics-8.2.27}/tests/test_integrations.py +0 -0
  28. {ultralytics-8.2.25 → ultralytics-8.2.27}/tests/test_python.py +0 -0
  29. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/assets/bus.jpg +0 -0
  30. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/assets/zidane.jpg +0 -0
  31. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/__init__.py +0 -0
  32. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/Argoverse.yaml +0 -0
  33. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/DOTAv1.5.yaml +0 -0
  34. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/DOTAv1.yaml +0 -0
  35. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/GlobalWheat2020.yaml +0 -0
  36. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/ImageNet.yaml +0 -0
  37. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/Objects365.yaml +0 -0
  38. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/SKU-110K.yaml +0 -0
  39. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/VOC.yaml +0 -0
  40. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/VisDrone.yaml +0 -0
  41. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/african-wildlife.yaml +0 -0
  42. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/brain-tumor.yaml +0 -0
  43. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/carparts-seg.yaml +0 -0
  44. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/coco-pose.yaml +0 -0
  45. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/coco.yaml +0 -0
  46. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/coco128-seg.yaml +0 -0
  47. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/coco128.yaml +0 -0
  48. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/coco8-pose.yaml +0 -0
  49. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/coco8-seg.yaml +0 -0
  50. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/coco8.yaml +0 -0
  51. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/crack-seg.yaml +0 -0
  52. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/dota8.yaml +0 -0
  53. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/lvis.yaml +0 -0
  54. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/open-images-v7.yaml +0 -0
  55. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/package-seg.yaml +0 -0
  56. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/signature.yaml +0 -0
  57. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/tiger-pose.yaml +0 -0
  58. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/datasets/xView.yaml +0 -0
  59. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/rt-detr/rtdetr-l.yaml +0 -0
  60. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/rt-detr/rtdetr-resnet101.yaml +0 -0
  61. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/rt-detr/rtdetr-resnet50.yaml +0 -0
  62. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/rt-detr/rtdetr-x.yaml +0 -0
  63. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v3/yolov3-spp.yaml +0 -0
  64. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v3/yolov3-tiny.yaml +0 -0
  65. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v3/yolov3.yaml +0 -0
  66. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v5/yolov5-p6.yaml +0 -0
  67. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v5/yolov5.yaml +0 -0
  68. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v6/yolov6.yaml +0 -0
  69. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8-cls-resnet101.yaml +0 -0
  70. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8-cls-resnet50.yaml +0 -0
  71. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8-cls.yaml +0 -0
  72. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8-ghost-p2.yaml +0 -0
  73. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8-ghost-p6.yaml +0 -0
  74. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8-ghost.yaml +0 -0
  75. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8-obb.yaml +0 -0
  76. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8-p2.yaml +0 -0
  77. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8-p6.yaml +0 -0
  78. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8-pose-p6.yaml +0 -0
  79. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8-pose.yaml +0 -0
  80. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8-rtdetr.yaml +0 -0
  81. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8-seg-p6.yaml +0 -0
  82. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8-seg.yaml +0 -0
  83. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8-world.yaml +0 -0
  84. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8-worldv2.yaml +0 -0
  85. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v8/yolov8.yaml +0 -0
  86. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v9/yolov9c-seg.yaml +0 -0
  87. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v9/yolov9c.yaml +0 -0
  88. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v9/yolov9e-seg.yaml +0 -0
  89. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/models/v9/yolov9e.yaml +0 -0
  90. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/trackers/botsort.yaml +0 -0
  91. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/cfg/trackers/bytetrack.yaml +0 -0
  92. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/data/__init__.py +0 -0
  93. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/data/annotator.py +0 -0
  94. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/data/augment.py +0 -0
  95. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/data/base.py +0 -0
  96. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/data/build.py +0 -0
  97. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/data/converter.py +0 -0
  98. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/data/dataset.py +0 -0
  99. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/data/explorer/__init__.py +0 -0
  100. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/data/explorer/explorer.py +0 -0
  101. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/data/explorer/gui/__init__.py +0 -0
  102. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/data/explorer/gui/dash.py +0 -0
  103. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/data/explorer/utils.py +0 -0
  104. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/data/loaders.py +0 -0
  105. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/data/split_dota.py +0 -0
  106. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/data/utils.py +0 -0
  107. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/engine/__init__.py +0 -0
  108. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/engine/model.py +0 -0
  109. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/engine/predictor.py +0 -0
  110. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/engine/results.py +0 -0
  111. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/engine/trainer.py +0 -0
  112. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/engine/tuner.py +0 -0
  113. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/engine/validator.py +0 -0
  114. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/hub/__init__.py +0 -0
  115. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/hub/auth.py +0 -0
  116. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/hub/session.py +0 -0
  117. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/hub/utils.py +0 -0
  118. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/__init__.py +0 -0
  119. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/fastsam/__init__.py +0 -0
  120. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/fastsam/model.py +0 -0
  121. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/fastsam/predict.py +0 -0
  122. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/fastsam/utils.py +0 -0
  123. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/fastsam/val.py +0 -0
  124. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/nas/__init__.py +0 -0
  125. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/nas/model.py +0 -0
  126. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/nas/predict.py +0 -0
  127. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/nas/val.py +0 -0
  128. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/rtdetr/__init__.py +0 -0
  129. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/rtdetr/model.py +0 -0
  130. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/rtdetr/predict.py +0 -0
  131. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/rtdetr/train.py +0 -0
  132. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/rtdetr/val.py +0 -0
  133. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/sam/__init__.py +0 -0
  134. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/sam/amg.py +0 -0
  135. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/sam/build.py +0 -0
  136. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/sam/model.py +0 -0
  137. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/sam/modules/__init__.py +0 -0
  138. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/sam/modules/decoders.py +0 -0
  139. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/sam/modules/encoders.py +0 -0
  140. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/sam/modules/sam.py +0 -0
  141. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/sam/modules/tiny_encoder.py +0 -0
  142. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/sam/modules/transformer.py +0 -0
  143. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/sam/predict.py +0 -0
  144. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/utils/__init__.py +0 -0
  145. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/utils/loss.py +0 -0
  146. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/utils/ops.py +0 -0
  147. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/__init__.py +0 -0
  148. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/classify/__init__.py +0 -0
  149. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/classify/predict.py +0 -0
  150. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/classify/train.py +0 -0
  151. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/classify/val.py +0 -0
  152. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/detect/__init__.py +0 -0
  153. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/detect/predict.py +0 -0
  154. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/detect/train.py +0 -0
  155. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/model.py +0 -0
  156. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/obb/__init__.py +0 -0
  157. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/obb/predict.py +0 -0
  158. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/obb/train.py +0 -0
  159. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/obb/val.py +0 -0
  160. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/pose/__init__.py +0 -0
  161. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/pose/predict.py +0 -0
  162. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/pose/train.py +0 -0
  163. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/segment/__init__.py +0 -0
  164. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/segment/predict.py +0 -0
  165. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/segment/train.py +0 -0
  166. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/world/__init__.py +0 -0
  167. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/world/train.py +0 -0
  168. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/models/yolo/world/train_world.py +0 -0
  169. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/nn/__init__.py +0 -0
  170. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/nn/autobackend.py +0 -0
  171. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/nn/modules/block.py +0 -0
  172. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/nn/modules/conv.py +0 -0
  173. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/nn/modules/head.py +0 -0
  174. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/nn/modules/transformer.py +0 -0
  175. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/nn/modules/utils.py +0 -0
  176. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/solutions/__init__.py +0 -0
  177. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/solutions/ai_gym.py +0 -0
  178. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/solutions/distance_calculation.py +0 -0
  179. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/solutions/heatmap.py +0 -0
  180. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/solutions/object_counter.py +0 -0
  181. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/solutions/parking_management.py +0 -0
  182. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/solutions/queue_management.py +0 -0
  183. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/solutions/speed_estimation.py +0 -0
  184. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/trackers/__init__.py +0 -0
  185. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/trackers/basetrack.py +0 -0
  186. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/trackers/bot_sort.py +0 -0
  187. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/trackers/byte_tracker.py +0 -0
  188. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/trackers/track.py +0 -0
  189. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/trackers/utils/__init__.py +0 -0
  190. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/trackers/utils/gmc.py +0 -0
  191. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/trackers/utils/kalman_filter.py +0 -0
  192. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/trackers/utils/matching.py +0 -0
  193. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/__init__.py +0 -0
  194. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/autobatch.py +0 -0
  195. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/callbacks/__init__.py +0 -0
  196. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/callbacks/base.py +0 -0
  197. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/callbacks/clearml.py +0 -0
  198. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/callbacks/comet.py +0 -0
  199. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/callbacks/dvc.py +0 -0
  200. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/callbacks/hub.py +0 -0
  201. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/callbacks/mlflow.py +0 -0
  202. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/callbacks/neptune.py +0 -0
  203. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/callbacks/raytune.py +0 -0
  204. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/callbacks/tensorboard.py +0 -0
  205. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/callbacks/wb.py +0 -0
  206. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/dist.py +0 -0
  207. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/downloads.py +0 -0
  208. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/errors.py +0 -0
  209. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/files.py +0 -0
  210. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/instance.py +0 -0
  211. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/loss.py +0 -0
  212. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/metrics.py +0 -0
  213. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/patches.py +0 -0
  214. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/plotting.py +0 -0
  215. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/tal.py +0 -0
  216. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/torch_utils.py +0 -0
  217. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/triton.py +0 -0
  218. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics/utils/tuner.py +0 -0
  219. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics.egg-info/SOURCES.txt +0 -0
  220. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics.egg-info/dependency_links.txt +0 -0
  221. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics.egg-info/entry_points.txt +0 -0
  222. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics.egg-info/requires.txt +0 -0
  223. {ultralytics-8.2.25 → ultralytics-8.2.27}/ultralytics.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ultralytics
3
- Version: 8.2.25
3
+ Version: 8.2.27
4
4
  Summary: Ultralytics YOLOv8 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
5
5
  Author: Glenn Jocher, Ayush Chaurasia, Jing Qiu
6
6
  Maintainer: Glenn Jocher, Ayush Chaurasia, Jing Qiu
@@ -102,7 +102,7 @@ export = [
102
102
  "openvino>=2024.0.0", # OpenVINO export
103
103
  "tensorflow>=2.0.0", # TF bug https://github.com/ultralytics/ultralytics/issues/5161
104
104
  "tensorflowjs>=3.9.0", # TF.js export, automatically installs tensorflow
105
- "keras", # not installed auotomatically by tensorflow>=2.16
105
+ "keras", # not installed automatically by tensorflow>=2.16
106
106
  "flatbuffers>=23.5.26,<100; platform_machine == 'aarch64'", # update old 'flatbuffers' included inside tensorflow package
107
107
  "numpy==1.23.5; platform_machine == 'aarch64'", # fix error: `np.bool` was a deprecated alias for the builtin `bool` when using TensorRT models on NVIDIA Jetson
108
108
  "h5py!=3.11.0; platform_machine == 'aarch64'", # fix h5py build issues due to missing aarch64 wheels in 3.11 release
@@ -112,9 +112,6 @@ explorer = [
112
112
  "duckdb<=0.9.2", # SQL queries, duckdb==0.10.0 bug https://github.com/ultralytics/ultralytics/pull/8181
113
113
  "streamlit", # visualizing with GUI
114
114
  ]
115
- # tflite-support # for TFLite model metadata
116
- # nvidia-pyindex # TensorRT export
117
- # nvidia-tensorrt # TensorRT export
118
115
  logging = [
119
116
  "comet", # https://docs.ultralytics.com/integrations/comet/
120
117
  "tensorboard>=2.13.0",
@@ -3,6 +3,7 @@
3
3
  import subprocess
4
4
 
5
5
  import pytest
6
+ from PIL import Image
6
7
 
7
8
  from tests import CUDA_DEVICE_COUNT, CUDA_IS_AVAILABLE
8
9
  from ultralytics.cfg import TASK2DATA, TASK2MODEL, TASKS
@@ -74,26 +75,27 @@ def test_fastsam(task="segment", model=WEIGHTS_DIR / "FastSAM-s.pt", data="coco8
74
75
  sam_model = FastSAM(model) # or FastSAM-x.pt
75
76
 
76
77
  # Run inference on an image
77
- everything_results = sam_model(source, device="cpu", retina_masks=True, imgsz=1024, conf=0.4, iou=0.9)
78
+ for s in (source, Image.open(source)):
79
+ everything_results = sam_model(s, device="cpu", retina_masks=True, imgsz=320, conf=0.4, iou=0.9)
78
80
 
79
- # Remove small regions
80
- new_masks, _ = Predictor.remove_small_regions(everything_results[0].masks.data, min_area=20)
81
+ # Remove small regions
82
+ new_masks, _ = Predictor.remove_small_regions(everything_results[0].masks.data, min_area=20)
81
83
 
82
- # Everything prompt
83
- prompt_process = FastSAMPrompt(source, everything_results, device="cpu")
84
- ann = prompt_process.everything_prompt()
84
+ # Everything prompt
85
+ prompt_process = FastSAMPrompt(s, everything_results, device="cpu")
86
+ ann = prompt_process.everything_prompt()
85
87
 
86
- # Bbox default shape [0,0,0,0] -> [x1,y1,x2,y2]
87
- ann = prompt_process.box_prompt(bbox=[200, 200, 300, 300])
88
+ # Bbox default shape [0,0,0,0] -> [x1,y1,x2,y2]
89
+ ann = prompt_process.box_prompt(bbox=[200, 200, 300, 300])
88
90
 
89
- # Text prompt
90
- ann = prompt_process.text_prompt(text="a photo of a dog")
91
+ # Text prompt
92
+ ann = prompt_process.text_prompt(text="a photo of a dog")
91
93
 
92
- # Point prompt
93
- # Points default [[0,0]] [[x1,y1],[x2,y2]]
94
- # Point_label default [0] [1,0] 0:background, 1:foreground
95
- ann = prompt_process.point_prompt(points=[[200, 200]], pointlabel=[1])
96
- prompt_process.plot(annotations=ann, output="./")
94
+ # Point prompt
95
+ # Points default [[0,0]] [[x1,y1],[x2,y2]]
96
+ # Point_label default [0] [1,0] 0:background, 1:foreground
97
+ ann = prompt_process.point_prompt(points=[[200, 200]], pointlabel=[1])
98
+ prompt_process.plot(annotations=ann, output="./")
97
99
 
98
100
 
99
101
  def test_mobilesam():
@@ -41,6 +41,7 @@ def test_export_engine_matrix(task, dynamic, int8, half, batch):
41
41
  batch=batch,
42
42
  data=TASK2DATA[task],
43
43
  workspace=1, # reduce workspace GB for less resource utilization during testing
44
+ simplify=True, # use 'onnxslim'
44
45
  )
45
46
  YOLO(file)([SOURCE] * batch, imgsz=64 if dynamic else 32) # exported model inference
46
47
  Path(file).unlink() # cleanup
@@ -72,8 +72,10 @@ def test_export_openvino_matrix(task, dynamic, int8, half, batch):
72
72
 
73
73
 
74
74
  @pytest.mark.slow
75
- @pytest.mark.parametrize("task, dynamic, int8, half, batch", product(TASKS, [True, False], [False], [False], [1, 2]))
76
- def test_export_onnx_matrix(task, dynamic, int8, half, batch):
75
+ @pytest.mark.parametrize(
76
+ "task, dynamic, int8, half, batch, simplify", product(TASKS, [True, False], [False], [False], [1, 2], [True, False])
77
+ )
78
+ def test_export_onnx_matrix(task, dynamic, int8, half, batch, simplify):
77
79
  """Test YOLO exports to ONNX format."""
78
80
  file = YOLO(TASK2MODEL[task]).export(
79
81
  format="onnx",
@@ -82,6 +84,7 @@ def test_export_onnx_matrix(task, dynamic, int8, half, batch):
82
84
  int8=int8,
83
85
  half=half,
84
86
  batch=batch,
87
+ simplify=simplify,
85
88
  )
86
89
  YOLO(file)([SOURCE] * batch, imgsz=64 if dynamic else 32) # exported model inference
87
90
  Path(file).unlink() # cleanup
@@ -1,6 +1,6 @@
1
1
  # Ultralytics YOLO 🚀, AGPL-3.0 license
2
2
 
3
- __version__ = "8.2.25"
3
+ __version__ = "8.2.27"
4
4
 
5
5
  import os
6
6
 
@@ -81,7 +81,7 @@ keras: False # (bool) use Kera=s
81
81
  optimize: False # (bool) TorchScript: optimize for mobile
82
82
  int8: False # (bool) CoreML/TF INT8 quantization
83
83
  dynamic: False # (bool) ONNX/TF/TensorRT: dynamic axes
84
- simplify: False # (bool) ONNX: simplify model
84
+ simplify: False # (bool) ONNX: simplify model using `onnxslim`
85
85
  opset: # (int, optional) ONNX: opset version
86
86
  workspace: 4 # (int) TensorRT: workspace size (GB)
87
87
  nms: False # (bool) CoreML: add NMS
@@ -384,9 +384,7 @@ class Exporter:
384
384
  """YOLOv8 ONNX export."""
385
385
  requirements = ["onnx>=1.12.0"]
386
386
  if self.args.simplify:
387
- requirements += ["onnxsim>=0.4.33", "onnxruntime-gpu" if torch.cuda.is_available() else "onnxruntime"]
388
- if ARM64:
389
- check_requirements("cmake") # 'cmake' is needed to build onnxsim on aarch64
387
+ requirements += ["onnxslim==0.1.28", "onnxruntime" + ("-gpu" if torch.cuda.is_available() else "")]
390
388
  check_requirements(requirements)
391
389
  import onnx # noqa
392
390
 
@@ -423,14 +421,17 @@ class Exporter:
423
421
  # Simplify
424
422
  if self.args.simplify:
425
423
  try:
426
- import onnxsim
424
+ import onnxslim
427
425
 
428
- LOGGER.info(f"{prefix} simplifying with onnxsim {onnxsim.__version__}...")
429
- # subprocess.run(f'onnxsim "{f}" "{f}"', shell=True)
430
- model_onnx, check = onnxsim.simplify(model_onnx)
431
- assert check, "Simplified ONNX model could not be validated"
426
+ LOGGER.info(f"{prefix} slimming with onnxslim {onnxslim.__version__}...")
427
+ model_onnx = onnxslim.slim(model_onnx)
428
+
429
+ # ONNX Simplifier (deprecated as must be compiled with 'cmake' in aarch64 and Conda CI environments)
430
+ # import onnxsim
431
+ # model_onnx, check = onnxsim.simplify(model_onnx)
432
+ # assert check, "Simplified ONNX model could not be validated"
432
433
  except Exception as e:
433
- LOGGER.info(f"{prefix} simplifier failure: {e}")
434
+ LOGGER.warning(f"{prefix} simplifier failure: {e}")
434
435
 
435
436
  # Metadata
436
437
  for k, v in self.metadata.items():
@@ -674,8 +675,8 @@ class Exporter:
674
675
  def export_engine(self, prefix=colorstr("TensorRT:")):
675
676
  """YOLOv8 TensorRT export https://developer.nvidia.com/tensorrt."""
676
677
  assert self.im.device.type != "cpu", "export running on CPU but must be on GPU, i.e. use 'device=0'"
677
- self.args.simplify = True
678
- f_onnx, _ = self.export_onnx() # run before trt import https://github.com/ultralytics/ultralytics/issues/7016
678
+ # self.args.simplify = True
679
+ f_onnx, _ = self.export_onnx() # run before TRT import https://github.com/ultralytics/ultralytics/issues/7016
679
680
 
680
681
  try:
681
682
  import tensorrt as trt # noqa
@@ -815,15 +816,14 @@ class Exporter:
815
816
  version = ">=2.0.0"
816
817
  check_requirements(f"tensorflow{suffix}{version}")
817
818
  import tensorflow as tf # noqa
818
- if ARM64:
819
- check_requirements("cmake") # 'cmake' is needed to build onnxsim on aarch64
820
819
  check_requirements(
821
820
  (
822
- "keras",
821
+ "keras", # required by onnx2tf package
822
+ "tf_keras", # required by onnx2tf package
823
823
  "onnx>=1.12.0",
824
824
  "onnx2tf>1.17.5,<=1.22.3",
825
825
  "sng4onnx>=1.0.1",
826
- "onnxsim>=0.4.33",
826
+ "onnxslim==0.1.28",
827
827
  "onnx_graphsurgeon>=0.3.26",
828
828
  "tflite_support<=0.4.3" if IS_JETSON else "tflite_support", # fix ImportError 'GLIBCXX_3.4.29'
829
829
  "flatbuffers>=23.5.26,<100", # update old 'flatbuffers' included inside tensorflow package
@@ -24,6 +24,8 @@ class FastSAMPrompt:
24
24
 
25
25
  def __init__(self, source, results, device="cuda") -> None:
26
26
  """Initializes FastSAMPrompt with given source, results and device, and assigns clip for linear assignment."""
27
+ if isinstance(source, (str, Path)) and os.path.isdir(source):
28
+ raise ValueError(f"FastSAM only accepts image paths and PIL Image sources, not directories.")
27
29
  self.device = device
28
30
  self.results = results
29
31
  self.source = source
@@ -261,8 +263,6 @@ class FastSAMPrompt:
261
263
 
262
264
  def _crop_image(self, format_results):
263
265
  """Crops an image based on provided annotation format and returns cropped images and related data."""
264
- if os.path.isdir(self.source):
265
- raise ValueError(f"'{self.source}' is a directory, not a valid source for this function.")
266
266
  image = Image.fromarray(cv2.cvtColor(self.results[0].orig_img, cv2.COLOR_BGR2RGB))
267
267
  ori_w, ori_h = image.size
268
268
  annotations = format_results
@@ -287,8 +287,6 @@ class FastSAMPrompt:
287
287
  """Modifies the bounding box properties and calculates IoU between masks and bounding box."""
288
288
  if self.results[0].masks is not None:
289
289
  assert bbox[2] != 0 and bbox[3] != 0
290
- if os.path.isdir(self.source):
291
- raise ValueError(f"'{self.source}' is a directory, not a valid source for this function.")
292
290
  masks = self.results[0].masks.data
293
291
  target_height, target_width = self.results[0].orig_shape
294
292
  h = masks.shape[1]
@@ -321,8 +319,6 @@ class FastSAMPrompt:
321
319
  def point_prompt(self, points, pointlabel): # numpy
322
320
  """Adjusts points on detected masks based on user input and returns the modified results."""
323
321
  if self.results[0].masks is not None:
324
- if os.path.isdir(self.source):
325
- raise ValueError(f"'{self.source}' is a directory, not a valid source for this function.")
326
322
  masks = self._format_results(self.results[0], 0)
327
323
  target_height, target_width = self.results[0].orig_shape
328
324
  h = masks[0]["segmentation"].shape[0]
@@ -32,6 +32,7 @@ class DetectionValidator(BaseValidator):
32
32
  """Initialize detection model with necessary variables and settings."""
33
33
  super().__init__(dataloader, save_dir, pbar, args, _callbacks)
34
34
  self.nt_per_class = None
35
+ self.nt_per_image = None
35
36
  self.is_coco = False
36
37
  self.is_lvis = False
37
38
  self.class_map = None
@@ -77,7 +78,7 @@ class DetectionValidator(BaseValidator):
77
78
  self.confusion_matrix = ConfusionMatrix(nc=self.nc, conf=self.args.conf)
78
79
  self.seen = 0
79
80
  self.jdict = []
80
- self.stats = dict(tp=[], conf=[], pred_cls=[], target_cls=[])
81
+ self.stats = dict(tp=[], conf=[], pred_cls=[], target_cls=[], target_img=[])
81
82
 
82
83
  def get_desc(self):
83
84
  """Return a formatted string summarizing class metrics of YOLO model."""
@@ -130,6 +131,7 @@ class DetectionValidator(BaseValidator):
130
131
  cls, bbox = pbatch.pop("cls"), pbatch.pop("bbox")
131
132
  nl = len(cls)
132
133
  stat["target_cls"] = cls
134
+ stat["target_img"] = cls.unique()
133
135
  if npr == 0:
134
136
  if nl:
135
137
  for k in self.stats.keys():
@@ -168,11 +170,11 @@ class DetectionValidator(BaseValidator):
168
170
  def get_stats(self):
169
171
  """Returns metrics statistics and results dictionary."""
170
172
  stats = {k: torch.cat(v, 0).cpu().numpy() for k, v in self.stats.items()} # to numpy
173
+ self.nt_per_class = np.bincount(stats["target_cls"].astype(int), minlength=self.nc)
174
+ self.nt_per_image = np.bincount(stats["target_img"].astype(int), minlength=self.nc)
175
+ stats.pop("target_img", None)
171
176
  if len(stats) and stats["tp"].any():
172
177
  self.metrics.process(**stats)
173
- self.nt_per_class = np.bincount(
174
- stats["target_cls"].astype(int), minlength=self.nc
175
- ) # number of targets per class
176
178
  return self.metrics.results_dict
177
179
 
178
180
  def print_results(self):
@@ -185,7 +187,9 @@ class DetectionValidator(BaseValidator):
185
187
  # Print results per class
186
188
  if self.args.verbose and not self.training and self.nc > 1 and len(self.stats):
187
189
  for i, c in enumerate(self.metrics.ap_class_index):
188
- LOGGER.info(pf % (self.names[c], self.seen, self.nt_per_class[c], *self.metrics.class_result(i)))
190
+ LOGGER.info(
191
+ pf % (self.names[c], self.nt_per_image[c], self.nt_per_class[c], *self.metrics.class_result(i))
192
+ )
189
193
 
190
194
  if self.args.plots:
191
195
  for normalize in True, False:
@@ -81,7 +81,7 @@ class PoseValidator(DetectionValidator):
81
81
  is_pose = self.kpt_shape == [17, 3]
82
82
  nkpt = self.kpt_shape[0]
83
83
  self.sigma = OKS_SIGMA if is_pose else np.ones(nkpt) / nkpt
84
- self.stats = dict(tp_p=[], tp=[], conf=[], pred_cls=[], target_cls=[])
84
+ self.stats = dict(tp_p=[], tp=[], conf=[], pred_cls=[], target_cls=[], target_img=[])
85
85
 
86
86
  def _prepare_batch(self, si, batch):
87
87
  """Prepares a batch for processing by converting keypoints to float and moving to device."""
@@ -118,6 +118,7 @@ class PoseValidator(DetectionValidator):
118
118
  cls, bbox = pbatch.pop("cls"), pbatch.pop("bbox")
119
119
  nl = len(cls)
120
120
  stat["target_cls"] = cls
121
+ stat["target_img"] = cls.unique()
121
122
  if npr == 0:
122
123
  if nl:
123
124
  for k in self.stats.keys():
@@ -51,7 +51,7 @@ class SegmentationValidator(DetectionValidator):
51
51
  self.process = ops.process_mask_upsample # more accurate
52
52
  else:
53
53
  self.process = ops.process_mask # faster
54
- self.stats = dict(tp_m=[], tp=[], conf=[], pred_cls=[], target_cls=[])
54
+ self.stats = dict(tp_m=[], tp=[], conf=[], pred_cls=[], target_cls=[], target_img=[])
55
55
 
56
56
  def get_desc(self):
57
57
  """Return a formatted description of evaluation metrics."""
@@ -112,6 +112,7 @@ class SegmentationValidator(DetectionValidator):
112
112
  cls, bbox = pbatch.pop("cls"), pbatch.pop("bbox")
113
113
  nl = len(cls)
114
114
  stat["target_cls"] = cls
115
+ stat["target_img"] = cls.unique()
115
116
  if npr == 0:
116
117
  if nl:
117
118
  for k in self.stats.keys():
@@ -13,7 +13,7 @@ Example:
13
13
  m = Conv(128, 128)
14
14
  f = f'{m._get_name()}.onnx'
15
15
  torch.onnx.export(m, x, f)
16
- os.system(f'onnxsim {f} {f} && open {f}')
16
+ os.system(f'onnxslim {f} {f} && open {f}') # pip install onnxslim
17
17
  ```
18
18
  """
19
19
 
@@ -425,11 +425,11 @@ class ClassificationModel(BaseModel):
425
425
  elif isinstance(m, nn.Sequential):
426
426
  types = [type(x) for x in m]
427
427
  if nn.Linear in types:
428
- i = types.index(nn.Linear) # nn.Linear index
428
+ i = len(types) - 1 - types[::-1].index(nn.Linear) # last nn.Linear index
429
429
  if m[i].out_features != nc:
430
430
  m[i] = nn.Linear(m[i].in_features, nc)
431
431
  elif nn.Conv2d in types:
432
- i = types.index(nn.Conv2d) # nn.Conv2d index
432
+ i = len(types) - 1 - types[::-1].index(nn.Conv2d) # last nn.Conv2d index
433
433
  if m[i].out_channels != nc:
434
434
  m[i] = nn.Conv2d(m[i].in_channels, nc, m[i].kernel_size, m[i].stride, bias=m[i].bias is not None)
435
435
 
@@ -1,5 +1,6 @@
1
1
  # Ultralytics YOLO 🚀, AGPL-3.0 license
2
2
 
3
+ import warnings
3
4
  from itertools import cycle
4
5
 
5
6
  import cv2
@@ -27,6 +28,7 @@ class Analytics:
27
28
  fontsize=13,
28
29
  view_img=False,
29
30
  save_img=True,
31
+ max_points=50,
30
32
  ):
31
33
  """
32
34
  Initialize the Analytics class with various chart types.
@@ -45,6 +47,7 @@ class Analytics:
45
47
  fontsize (int): Font size for chart text.
46
48
  view_img (bool): Whether to display the image.
47
49
  save_img (bool): Whether to save the image.
50
+ max_points (int): Specifies when to remove the oldest points in a graph for multiple lines.
48
51
  """
49
52
 
50
53
  self.bg_color = bg_color
@@ -53,12 +56,14 @@ class Analytics:
53
56
  self.save_img = save_img
54
57
  self.title = title
55
58
  self.writer = writer
59
+ self.max_points = max_points
56
60
 
57
61
  # Set figure size based on image shape
58
62
  figsize = (im0_shape[0] / 100, im0_shape[1] / 100)
59
63
 
60
64
  if type == "line":
61
65
  # Initialize line plot
66
+ self.lines = {}
62
67
  fig = Figure(facecolor=self.bg_color, figsize=figsize)
63
68
  self.canvas = FigureCanvas(fig)
64
69
  self.ax = fig.add_subplot(111, facecolor=self.bg_color)
@@ -112,9 +117,53 @@ class Analytics:
112
117
  self.ax.autoscale_view()
113
118
  self.canvas.draw()
114
119
  im0 = np.array(self.canvas.renderer.buffer_rgba())
115
- im0 = cv2.cvtColor(im0[:, :, :3], cv2.COLOR_RGBA2BGR)
120
+ self.write_and_display_line(im0)
116
121
 
117
- # Display and save the updated graph
122
+ def update_multiple_lines(self, counts_dict, labels_list, frame_number):
123
+ """
124
+ Update the line graph with multiple classes.
125
+
126
+ Args:
127
+ counts_dict (int): Dictionary include each class counts.
128
+ labels_list (int): list include each classes names.
129
+ frame_number (int): The current frame number.
130
+ """
131
+ warnings.warn("Display is not supported for multiple lines, output will be stored normally!")
132
+ for obj in labels_list:
133
+ if obj not in self.lines:
134
+ (line,) = self.ax.plot([], [], label=obj, marker="o", markersize=15)
135
+ self.lines[obj] = line
136
+
137
+ x_data = self.lines[obj].get_xdata()
138
+ y_data = self.lines[obj].get_ydata()
139
+
140
+ # Remove the initial point if the number of points exceeds max_points
141
+ if len(x_data) >= self.max_points:
142
+ x_data = np.delete(x_data, 0)
143
+ y_data = np.delete(y_data, 0)
144
+
145
+ x_data = np.append(x_data, float(frame_number)) # Ensure frame_number is converted to float
146
+ y_data = np.append(y_data, float(counts_dict.get(obj, 0)))  # Ensure the class count is converted to float
147
+ self.lines[obj].set_data(x_data, y_data)
148
+
149
+ self.ax.relim()
150
+ self.ax.autoscale_view()
151
+ self.ax.legend()
152
+ self.canvas.draw()
153
+
154
+ im0 = np.array(self.canvas.renderer.buffer_rgba())
155
+ self.view_img = False # for multiple line view_img not supported yet, coming soon!
156
+ self.write_and_display_line(im0)
157
+
158
+ def write_and_display_line(self, im0):
159
+ """
160
+ Write and display the line graph.
161
+ Args:
162
+ im0 (ndarray): Image for processing
163
+ """
164
+
165
+ # Convert image from RGBA to BGR format for OpenCV display and writing
166
+ im0 = cv2.cvtColor(im0[:, :, :3], cv2.COLOR_RGBA2BGR)
118
167
  cv2.imshow(self.title, im0) if self.view_img else None
119
168
  self.writer.write(im0) if self.save_img else None
120
169
 
@@ -457,6 +457,8 @@ class ProfileModels:
457
457
 
458
458
  input_tensor = sess.get_inputs()[0]
459
459
  input_type = input_tensor.type
460
+ dynamic = not all(isinstance(dim, int) and dim >= 0 for dim in input_tensor.shape) # dynamic input shape
461
+ input_shape = (1, 3, self.imgsz, self.imgsz) if dynamic else input_tensor.shape
460
462
 
461
463
  # Mapping ONNX datatype to numpy datatype
462
464
  if "float16" in input_type:
@@ -472,7 +474,7 @@ class ProfileModels:
472
474
  else:
473
475
  raise ValueError(f"Unsupported ONNX datatype {input_type}")
474
476
 
475
- input_data = np.random.rand(*input_tensor.shape).astype(input_dtype)
477
+ input_data = np.random.rand(*input_shape).astype(input_dtype)
476
478
  input_name = input_tensor.name
477
479
  output_name = sess.get_outputs()[0].name
478
480
 
@@ -33,6 +33,7 @@ from ultralytics.utils import (
33
33
  ROOT,
34
34
  TORCHVISION_VERSION,
35
35
  USER_CONFIG_DIR,
36
+ Retry,
36
37
  SimpleNamespace,
37
38
  ThreadingLocked,
38
39
  TryExcept,
@@ -381,6 +382,11 @@ def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=()
381
382
  except (AssertionError, metadata.PackageNotFoundError):
382
383
  pkgs.append(r)
383
384
 
385
+ @Retry(times=2, delay=1)
386
+ def attempt_install(packages, commands):
387
+ """Attempt pip install command with retries on failure."""
388
+ return subprocess.check_output(f"pip install --no-cache-dir {packages} {commands}", shell=True).decode()
389
+
384
390
  s = " ".join(f'"{x}"' for x in pkgs) # console string
385
391
  if s:
386
392
  if install and AUTOINSTALL: # check environment variable
@@ -389,7 +395,7 @@ def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=()
389
395
  try:
390
396
  t = time.time()
391
397
  assert ONLINE, "AutoUpdate skipped (offline)"
392
- LOGGER.info(subprocess.check_output(f"pip install --no-cache-dir {s} {cmds}", shell=True).decode())
398
+ LOGGER.info(attempt_install(s, cmds))
393
399
  dt = time.time() - t
394
400
  LOGGER.info(
395
401
  f"{prefix} AutoUpdate success ✅ {dt:.1f}s, installed {n} package{'s' * (n > 1)}: {pkgs}\n"
@@ -518,59 +518,58 @@ def ltwh2xywh(x):
518
518
  return y
519
519
 
520
520
 
521
- def xyxyxyxy2xywhr(corners):
521
+ def xyxyxyxy2xywhr(x):
522
522
  """
523
523
  Convert batched Oriented Bounding Boxes (OBB) from [xy1, xy2, xy3, xy4] to [xywh, rotation]. Rotation values are
524
524
  expected in degrees from 0 to 90.
525
525
 
526
526
  Args:
527
- corners (numpy.ndarray | torch.Tensor): Input corners of shape (n, 8).
527
+ x (numpy.ndarray | torch.Tensor): Input box corners [xy1, xy2, xy3, xy4] of shape (n, 8).
528
528
 
529
529
  Returns:
530
530
  (numpy.ndarray | torch.Tensor): Converted data in [cx, cy, w, h, rotation] format of shape (n, 5).
531
531
  """
532
- is_torch = isinstance(corners, torch.Tensor)
533
- points = corners.cpu().numpy() if is_torch else corners
534
- points = points.reshape(len(corners), -1, 2)
532
+ is_torch = isinstance(x, torch.Tensor)
533
+ points = x.cpu().numpy() if is_torch else x
534
+ points = points.reshape(len(x), -1, 2)
535
535
  rboxes = []
536
536
  for pts in points:
537
537
  # NOTE: Use cv2.minAreaRect to get accurate xywhr,
538
538
  # especially some objects are cut off by augmentations in dataloader.
539
- (x, y), (w, h), angle = cv2.minAreaRect(pts)
540
- rboxes.append([x, y, w, h, angle / 180 * np.pi])
541
- return (
542
- torch.tensor(rboxes, device=corners.device, dtype=corners.dtype)
543
- if is_torch
544
- else np.asarray(rboxes, dtype=points.dtype)
545
- ) # rboxes
539
+ (cx, cy), (w, h), angle = cv2.minAreaRect(pts)
540
+ rboxes.append([cx, cy, w, h, angle / 180 * np.pi])
541
+ return torch.tensor(rboxes, device=x.device, dtype=x.dtype) if is_torch else np.asarray(rboxes)
546
542
 
547
543
 
548
- def xywhr2xyxyxyxy(rboxes):
544
+ def xywhr2xyxyxyxy(x):
549
545
  """
550
546
  Convert batched Oriented Bounding Boxes (OBB) from [xywh, rotation] to [xy1, xy2, xy3, xy4]. Rotation values should
551
547
  be in degrees from 0 to 90.
552
548
 
553
549
  Args:
554
- rboxes (numpy.ndarray | torch.Tensor): Boxes in [cx, cy, w, h, rotation] format of shape (n, 5) or (b, n, 5).
550
+ x (numpy.ndarray | torch.Tensor): Boxes in [cx, cy, w, h, rotation] format of shape (n, 5) or (b, n, 5).
555
551
 
556
552
  Returns:
557
553
  (numpy.ndarray | torch.Tensor): Converted corner points of shape (n, 4, 2) or (b, n, 4, 2).
558
554
  """
559
- is_numpy = isinstance(rboxes, np.ndarray)
560
- cos, sin = (np.cos, np.sin) if is_numpy else (torch.cos, torch.sin)
555
+ cos, sin, cat, stack = (
556
+ (torch.cos, torch.sin, torch.cat, torch.stack)
557
+ if isinstance(x, torch.Tensor)
558
+ else (np.cos, np.sin, np.concatenate, np.stack)
559
+ )
561
560
 
562
- ctr = rboxes[..., :2]
563
- w, h, angle = (rboxes[..., i : i + 1] for i in range(2, 5))
561
+ ctr = x[..., :2]
562
+ w, h, angle = (x[..., i : i + 1] for i in range(2, 5))
564
563
  cos_value, sin_value = cos(angle), sin(angle)
565
564
  vec1 = [w / 2 * cos_value, w / 2 * sin_value]
566
565
  vec2 = [-h / 2 * sin_value, h / 2 * cos_value]
567
- vec1 = np.concatenate(vec1, axis=-1) if is_numpy else torch.cat(vec1, dim=-1)
568
- vec2 = np.concatenate(vec2, axis=-1) if is_numpy else torch.cat(vec2, dim=-1)
566
+ vec1 = cat(vec1, -1)
567
+ vec2 = cat(vec2, -1)
569
568
  pt1 = ctr + vec1 + vec2
570
569
  pt2 = ctr + vec1 - vec2
571
570
  pt3 = ctr - vec1 - vec2
572
571
  pt4 = ctr - vec1 + vec2
573
- return np.stack([pt1, pt2, pt3, pt4], axis=-2) if is_numpy else torch.stack([pt1, pt2, pt3, pt4], dim=-2)
572
+ return stack([pt1, pt2, pt3, pt4], -2)
574
573
 
575
574
 
576
575
  def ltwh2xyxy(x):
@@ -785,7 +784,7 @@ def regularize_rboxes(rboxes):
785
784
  Regularize rotated boxes in range [0, pi/2].
786
785
 
787
786
  Args:
788
- rboxes (torch.Tensor): (N, 5), xywhr.
787
+ rboxes (torch.Tensor): Input boxes of shape(N, 5) in xywhr format.
789
788
 
790
789
  Returns:
791
790
  (torch.Tensor): The regularized boxes.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ultralytics
3
- Version: 8.2.25
3
+ Version: 8.2.27
4
4
  Summary: Ultralytics YOLOv8 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
5
5
  Author: Glenn Jocher, Ayush Chaurasia, Jing Qiu
6
6
  Maintainer: Glenn Jocher, Ayush Chaurasia, Jing Qiu
File without changes
File without changes
File without changes