ultralytics-opencv-headless 8.4.0.tar.gz → 8.4.3.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (315)
  1. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/PKG-INFO +36 -36
  2. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/README.md +35 -35
  3. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/tests/test_engine.py +2 -2
  4. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/tests/test_exports.py +3 -3
  5. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/__init__.py +1 -1
  6. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/__init__.py +19 -21
  7. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yolo26-cls.yaml +2 -2
  8. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yolo26-obb.yaml +2 -2
  9. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yolo26-p2.yaml +11 -11
  10. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yolo26-p6.yaml +8 -6
  11. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yolo26-pose.yaml +2 -2
  12. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yolo26-seg.yaml +2 -2
  13. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yolo26.yaml +1 -1
  14. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yoloe-26-seg.yaml +10 -10
  15. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yoloe-26.yaml +10 -10
  16. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/annotator.py +2 -2
  17. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/converter.py +57 -38
  18. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/engine/exporter.py +25 -27
  19. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/engine/model.py +33 -33
  20. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/engine/predictor.py +17 -17
  21. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/engine/results.py +14 -12
  22. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/engine/trainer.py +27 -22
  23. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/engine/tuner.py +4 -4
  24. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/engine/validator.py +16 -16
  25. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/classify/predict.py +1 -1
  26. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/classify/train.py +1 -1
  27. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/classify/val.py +1 -1
  28. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/detect/predict.py +2 -2
  29. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/detect/train.py +1 -1
  30. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/detect/val.py +1 -1
  31. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/model.py +7 -7
  32. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/obb/predict.py +1 -1
  33. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/obb/train.py +2 -2
  34. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/obb/val.py +1 -1
  35. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/pose/predict.py +1 -1
  36. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/pose/train.py +4 -2
  37. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/pose/val.py +1 -1
  38. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/segment/predict.py +2 -2
  39. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/segment/train.py +3 -3
  40. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/segment/val.py +1 -1
  41. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/autobackend.py +2 -6
  42. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/modules/block.py +1 -0
  43. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/modules/head.py +6 -34
  44. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/tasks.py +14 -14
  45. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/ai_gym.py +3 -3
  46. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/config.py +1 -1
  47. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/heatmap.py +1 -1
  48. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/instance_segmentation.py +2 -2
  49. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/parking_management.py +1 -1
  50. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/solutions.py +2 -2
  51. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/track.py +1 -1
  52. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/__init__.py +8 -8
  53. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/benchmarks.py +23 -24
  54. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/platform.py +11 -9
  55. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/checks.py +6 -6
  56. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/downloads.py +2 -2
  57. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/export/imx.py +3 -8
  58. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/export/tensorflow.py +40 -0
  59. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/files.py +2 -2
  60. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/loss.py +10 -7
  61. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/tuner.py +2 -2
  62. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics_opencv_headless.egg-info/PKG-INFO +36 -36
  63. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/LICENSE +0 -0
  64. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/pyproject.toml +0 -0
  65. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/setup.cfg +0 -0
  66. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/tests/__init__.py +0 -0
  67. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/tests/conftest.py +0 -0
  68. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/tests/test_cli.py +0 -0
  69. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/tests/test_cuda.py +0 -0
  70. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/tests/test_integrations.py +0 -0
  71. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/tests/test_python.py +0 -0
  72. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/tests/test_solutions.py +0 -0
  73. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/assets/bus.jpg +0 -0
  74. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/assets/zidane.jpg +0 -0
  75. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/Argoverse.yaml +0 -0
  76. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/DOTAv1.5.yaml +0 -0
  77. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/DOTAv1.yaml +0 -0
  78. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/GlobalWheat2020.yaml +0 -0
  79. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/HomeObjects-3K.yaml +0 -0
  80. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/ImageNet.yaml +0 -0
  81. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/Objects365.yaml +0 -0
  82. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/SKU-110K.yaml +0 -0
  83. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/TT100K.yaml +0 -0
  84. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/VOC.yaml +0 -0
  85. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/VisDrone.yaml +0 -0
  86. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/african-wildlife.yaml +0 -0
  87. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/brain-tumor.yaml +0 -0
  88. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/carparts-seg.yaml +0 -0
  89. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco-pose.yaml +0 -0
  90. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco.yaml +0 -0
  91. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco128-seg.yaml +0 -0
  92. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco128.yaml +0 -0
  93. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco8-grayscale.yaml +0 -0
  94. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco8-multispectral.yaml +0 -0
  95. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco8-pose.yaml +0 -0
  96. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco8-seg.yaml +0 -0
  97. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco8.yaml +0 -0
  98. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/construction-ppe.yaml +0 -0
  99. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/crack-seg.yaml +0 -0
  100. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/dog-pose.yaml +0 -0
  101. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/dota8-multispectral.yaml +0 -0
  102. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/dota8.yaml +0 -0
  103. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/hand-keypoints.yaml +0 -0
  104. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/kitti.yaml +0 -0
  105. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/lvis.yaml +0 -0
  106. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/medical-pills.yaml +0 -0
  107. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/open-images-v7.yaml +0 -0
  108. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/package-seg.yaml +0 -0
  109. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/signature.yaml +0 -0
  110. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/tiger-pose.yaml +0 -0
  111. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/xView.yaml +0 -0
  112. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/default.yaml +0 -0
  113. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/11/yolo11-cls-resnet18.yaml +0 -0
  114. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/11/yolo11-cls.yaml +0 -0
  115. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/11/yolo11-obb.yaml +0 -0
  116. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/11/yolo11-pose.yaml +0 -0
  117. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/11/yolo11-seg.yaml +0 -0
  118. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/11/yolo11.yaml +0 -0
  119. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/11/yoloe-11-seg.yaml +0 -0
  120. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/11/yoloe-11.yaml +0 -0
  121. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/12/yolo12-cls.yaml +0 -0
  122. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/12/yolo12-obb.yaml +0 -0
  123. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/12/yolo12-pose.yaml +0 -0
  124. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/12/yolo12-seg.yaml +0 -0
  125. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/12/yolo12.yaml +0 -0
  126. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/rt-detr/rtdetr-l.yaml +0 -0
  127. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/rt-detr/rtdetr-resnet101.yaml +0 -0
  128. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/rt-detr/rtdetr-resnet50.yaml +0 -0
  129. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/rt-detr/rtdetr-x.yaml +0 -0
  130. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v10/yolov10b.yaml +0 -0
  131. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v10/yolov10l.yaml +0 -0
  132. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v10/yolov10m.yaml +0 -0
  133. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v10/yolov10n.yaml +0 -0
  134. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v10/yolov10s.yaml +0 -0
  135. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v10/yolov10x.yaml +0 -0
  136. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v3/yolov3-spp.yaml +0 -0
  137. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v3/yolov3-tiny.yaml +0 -0
  138. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v3/yolov3.yaml +0 -0
  139. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v5/yolov5-p6.yaml +0 -0
  140. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v5/yolov5.yaml +0 -0
  141. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v6/yolov6.yaml +0 -0
  142. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yoloe-v8-seg.yaml +0 -0
  143. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yoloe-v8.yaml +0 -0
  144. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-cls-resnet101.yaml +0 -0
  145. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-cls-resnet50.yaml +0 -0
  146. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-cls.yaml +0 -0
  147. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-ghost-p2.yaml +0 -0
  148. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-ghost-p6.yaml +0 -0
  149. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-ghost.yaml +0 -0
  150. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-obb.yaml +0 -0
  151. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-p2.yaml +0 -0
  152. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-p6.yaml +0 -0
  153. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-pose-p6.yaml +0 -0
  154. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-pose.yaml +0 -0
  155. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-rtdetr.yaml +0 -0
  156. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-seg-p6.yaml +0 -0
  157. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-seg.yaml +0 -0
  158. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-world.yaml +0 -0
  159. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-worldv2.yaml +0 -0
  160. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8.yaml +0 -0
  161. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v9/yolov9c-seg.yaml +0 -0
  162. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v9/yolov9c.yaml +0 -0
  163. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v9/yolov9e-seg.yaml +0 -0
  164. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v9/yolov9e.yaml +0 -0
  165. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v9/yolov9m.yaml +0 -0
  166. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v9/yolov9s.yaml +0 -0
  167. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v9/yolov9t.yaml +0 -0
  168. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/trackers/botsort.yaml +0 -0
  169. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/trackers/bytetrack.yaml +0 -0
  170. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/__init__.py +0 -0
  171. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/augment.py +0 -0
  172. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/base.py +0 -0
  173. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/build.py +0 -0
  174. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/dataset.py +0 -0
  175. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/loaders.py +0 -0
  176. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/scripts/download_weights.sh +0 -0
  177. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/scripts/get_coco.sh +0 -0
  178. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/scripts/get_coco128.sh +0 -0
  179. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/scripts/get_imagenet.sh +0 -0
  180. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/split.py +0 -0
  181. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/split_dota.py +0 -0
  182. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/utils.py +0 -0
  183. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/engine/__init__.py +0 -0
  184. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/hub/__init__.py +0 -0
  185. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/hub/auth.py +0 -0
  186. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/hub/google/__init__.py +0 -0
  187. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/hub/session.py +0 -0
  188. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/hub/utils.py +0 -0
  189. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/__init__.py +0 -0
  190. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/fastsam/__init__.py +0 -0
  191. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/fastsam/model.py +0 -0
  192. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/fastsam/predict.py +0 -0
  193. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/fastsam/utils.py +0 -0
  194. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/fastsam/val.py +0 -0
  195. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/nas/__init__.py +0 -0
  196. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/nas/model.py +0 -0
  197. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/nas/predict.py +0 -0
  198. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/nas/val.py +0 -0
  199. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/rtdetr/__init__.py +0 -0
  200. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/rtdetr/model.py +0 -0
  201. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/rtdetr/predict.py +0 -0
  202. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/rtdetr/train.py +0 -0
  203. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/rtdetr/val.py +0 -0
  204. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/__init__.py +0 -0
  205. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/amg.py +0 -0
  206. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/build.py +0 -0
  207. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/build_sam3.py +0 -0
  208. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/model.py +0 -0
  209. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/__init__.py +0 -0
  210. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/blocks.py +0 -0
  211. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/decoders.py +0 -0
  212. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/encoders.py +0 -0
  213. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/memory_attention.py +0 -0
  214. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/sam.py +0 -0
  215. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/tiny_encoder.py +0 -0
  216. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/transformer.py +0 -0
  217. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/utils.py +0 -0
  218. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/predict.py +0 -0
  219. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/__init__.py +0 -0
  220. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/decoder.py +0 -0
  221. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/encoder.py +0 -0
  222. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/geometry_encoders.py +0 -0
  223. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/maskformer_segmentation.py +0 -0
  224. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/model_misc.py +0 -0
  225. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/necks.py +0 -0
  226. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/sam3_image.py +0 -0
  227. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/text_encoder_ve.py +0 -0
  228. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/vitdet.py +0 -0
  229. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/vl_combiner.py +0 -0
  230. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/utils/__init__.py +0 -0
  231. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/utils/loss.py +0 -0
  232. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/utils/ops.py +0 -0
  233. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/__init__.py +0 -0
  234. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/classify/__init__.py +0 -0
  235. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/detect/__init__.py +0 -0
  236. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/obb/__init__.py +0 -0
  237. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/pose/__init__.py +0 -0
  238. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/segment/__init__.py +0 -0
  239. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/world/__init__.py +0 -0
  240. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/world/train.py +0 -0
  241. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/world/train_world.py +0 -0
  242. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/yoloe/__init__.py +0 -0
  243. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/yoloe/predict.py +0 -0
  244. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/yoloe/train.py +0 -0
  245. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/yoloe/train_seg.py +0 -0
  246. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/yoloe/val.py +0 -0
  247. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/__init__.py +0 -0
  248. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/modules/__init__.py +0 -0
  249. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/modules/activation.py +0 -0
  250. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/modules/conv.py +0 -0
  251. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/modules/transformer.py +0 -0
  252. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/modules/utils.py +0 -0
  253. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/text_model.py +0 -0
  254. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/optim/__init__.py +0 -0
  255. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/optim/muon.py +0 -0
  256. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/py.typed +0 -0
  257. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/__init__.py +0 -0
  258. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/analytics.py +0 -0
  259. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/distance_calculation.py +0 -0
  260. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/object_blurrer.py +0 -0
  261. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/object_counter.py +0 -0
  262. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/object_cropper.py +0 -0
  263. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/queue_management.py +0 -0
  264. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/region_counter.py +0 -0
  265. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/security_alarm.py +0 -0
  266. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/similarity_search.py +0 -0
  267. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/speed_estimation.py +0 -0
  268. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/streamlit_inference.py +0 -0
  269. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/templates/similarity-search.html +0 -0
  270. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/trackzone.py +0 -0
  271. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/vision_eye.py +0 -0
  272. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/__init__.py +0 -0
  273. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/basetrack.py +0 -0
  274. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/bot_sort.py +0 -0
  275. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/byte_tracker.py +0 -0
  276. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/utils/__init__.py +0 -0
  277. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/utils/gmc.py +0 -0
  278. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/utils/kalman_filter.py +0 -0
  279. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/utils/matching.py +0 -0
  280. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/autobatch.py +0 -0
  281. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/autodevice.py +0 -0
  282. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/__init__.py +0 -0
  283. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/base.py +0 -0
  284. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/clearml.py +0 -0
  285. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/comet.py +0 -0
  286. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/dvc.py +0 -0
  287. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/hub.py +0 -0
  288. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/mlflow.py +0 -0
  289. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/neptune.py +0 -0
  290. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/raytune.py +0 -0
  291. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/tensorboard.py +0 -0
  292. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/wb.py +0 -0
  293. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/cpu.py +0 -0
  294. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/dist.py +0 -0
  295. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/errors.py +0 -0
  296. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/events.py +0 -0
  297. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/export/__init__.py +0 -0
  298. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/export/engine.py +0 -0
  299. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/git.py +0 -0
  300. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/instance.py +0 -0
  301. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/logger.py +0 -0
  302. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/metrics.py +0 -0
  303. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/nms.py +0 -0
  304. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/ops.py +0 -0
  305. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/patches.py +0 -0
  306. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/plotting.py +0 -0
  307. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/tal.py +0 -0
  308. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/torch_utils.py +0 -0
  309. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/tqdm.py +0 -0
  310. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/triton.py +0 -0
  311. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics_opencv_headless.egg-info/SOURCES.txt +0 -0
  312. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics_opencv_headless.egg-info/dependency_links.txt +0 -0
  313. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics_opencv_headless.egg-info/entry_points.txt +0 -0
  314. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics_opencv_headless.egg-info/requires.txt +0 -0
  315. {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.3}/ultralytics_opencv_headless.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ultralytics-opencv-headless
- Version: 8.4.0
+ Version: 8.4.3
  Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
  Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -218,13 +218,13 @@ Ultralytics supports a wide range of YOLO models, from early versions like [YOLO

  Explore the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples. These models are trained on the [COCO dataset](https://cocodataset.org/), featuring 80 object classes.

- | Model | size<br><sup>(pixels) | mAP<sup>val<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
- | ------------------------------------------------------------------------------------ | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
- | [YOLO26n](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n.pt) | 640 | 40.9 | 38.9 ± 0.7 | 1.7 ± 0.0 | 2.4 | 5.4 |
- | [YOLO26s](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s.pt) | 640 | 48.6 | 87.2 ± 0.9 | 2.5 ± 0.0 | 9.5 | 20.7 |
- | [YOLO26m](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m.pt) | 640 | 53.1 | 220.0 ± 1.4 | 4.7 ± 0.1 | 20.4 | 68.2 |
- | [YOLO26l](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l.pt) | 640 | 55.0 | 286.2 ± 2.0 | 6.2 ± 0.2 | 24.8 | 86.4 |
- | [YOLO26x](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x.pt) | 640 | 57.5 | 525.8 ± 4.0 | 11.8 ± 0.2 | 55.7 | 193.9 |
+ | Model | size<br><sup>(pixels)</sup> | mAP<sup>val<br>50-95</sup> | mAP<sup>val<br>50-95(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+ | ------------------------------------------------------------------------------------ | --------------------------- | -------------------------- | ------------------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ----------------------- |
+ | [YOLO26n](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n.pt) | 640 | 40.9 | 40.1 | 38.9 ± 0.7 | 1.7 ± 0.0 | 2.4 | 5.4 |
+ | [YOLO26s](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s.pt) | 640 | 48.6 | 47.8 | 87.2 ± 0.9 | 2.5 ± 0.0 | 9.5 | 20.7 |
+ | [YOLO26m](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m.pt) | 640 | 53.1 | 52.5 | 220.0 ± 1.4 | 4.7 ± 0.1 | 20.4 | 68.2 |
+ | [YOLO26l](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l.pt) | 640 | 55.0 | 54.4 | 286.2 ± 2.0 | 6.2 ± 0.2 | 24.8 | 86.4 |
+ | [YOLO26x](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x.pt) | 640 | 57.5 | 56.9 | 525.8 ± 4.0 | 11.8 ± 0.2 | 55.7 | 193.9 |

  - **mAP<sup>val</sup>** values refer to single-model single-scale performance on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val detect data=coco.yaml device=0`
  - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val detect data=coco.yaml batch=1 device=0|cpu`
@@ -235,13 +235,13 @@ Explore the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usa

  Refer to the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage examples. These models are trained on [COCO-Seg](https://docs.ultralytics.com/datasets/segment/coco/), including 80 classes.

- | Model | size<br><sup>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mask<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
- | -------------------------------------------------------------------------------------------- | --------------------- | -------------------- | --------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
- | [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640 | 39.6 | 33.9 | 53.3 ± 0.5 | 2.1 ± 0.0 | 2.8 | 9.1 |
- | [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640 | 47.3 | 40.0 | 118.4 ± 0.9 | 3.3 ± 0.0 | 10.7 | 34.2 |
- | [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640 | 52.5 | 44.1 | 328.2 ± 2.4 | 6.7 ± 0.1 | 24.8 | 121.5 |
- | [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640 | 54.4 | 45.5 | 387.0 ± 3.7 | 8.0 ± 0.1 | 29.2 | 139.8 |
- | [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640 | 56.5 | 47.0 | 787.0 ± 6.8 | 16.4 ± 0.1 | 65.5 | 313.5 |
+ | Model | size<br><sup>(pixels)</sup> | mAP<sup>box<br>50-95(e2e)</sup> | mAP<sup>mask<br>50-95(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+ | -------------------------------------------------------------------------------------------- | --------------------------- | ------------------------------- | -------------------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ----------------------- |
+ | [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640 | 39.6 | 33.9 | 53.3 ± 0.5 | 2.1 ± 0.0 | 2.7 | 9.1 |
+ | [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640 | 47.3 | 40.0 | 118.4 ± 0.9 | 3.3 ± 0.0 | 10.4 | 34.2 |
+ | [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640 | 52.5 | 44.1 | 328.2 ± 2.4 | 6.7 ± 0.1 | 23.6 | 121.5 |
+ | [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640 | 54.4 | 45.5 | 387.0 ± 3.7 | 8.0 ± 0.1 | 28.0 | 139.8 |
+ | [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640 | 56.5 | 47.0 | 787.0 ± 6.8 | 16.4 ± 0.1 | 62.8 | 313.5 |

  - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val segment data=coco.yaml device=0`
  - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val segment data=coco.yaml batch=1 device=0|cpu`
@@ -252,13 +252,13 @@ Refer to the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) fo

  Consult the [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usage examples. These models are trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), covering 1000 classes.

- | Model | size<br><sup>(pixels) | acc<br><sup>top1 | acc<br><sup>top5 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) at 224 |
- | -------------------------------------------------------------------------------------------- | --------------------- | ---------------- | ---------------- | ------------------------------ | ----------------------------------- | ------------------ | ------------------------ |
- | [YOLO26n-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-cls.pt) | 224 | 71.4 | 90.1 | 5.0 ± 0.3 | 1.1 ± 0.0 | 2.8 | 0.5 |
- | [YOLO26s-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-cls.pt) | 224 | 76.0 | 92.9 | 7.9 ± 0.2 | 1.3 ± 0.0 | 6.7 | 1.6 |
- | [YOLO26m-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-cls.pt) | 224 | 78.1 | 94.2 | 17.2 ± 0.4 | 2.0 ± 0.0 | 11.6 | 4.9 |
- | [YOLO26l-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-cls.pt) | 224 | 79.0 | 94.6 | 23.2 ± 0.3 | 2.8 ± 0.0 | 14.1 | 6.2 |
- | [YOLO26x-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-cls.pt) | 224 | 79.9 | 95.0 | 41.4 ± 0.9 | 3.8 ± 0.0 | 29.6 | 13.6 |
+ | Model | size<br><sup>(pixels)</sup> | acc<br><sup>top1</sup> | acc<br><sup>top5</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B) at 224</sup> |
+ | -------------------------------------------------------------------------------------------- | --------------------------- | ---------------------- | ---------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ------------------------------ |
+ | [YOLO26n-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-cls.pt) | 224 | 71.4 | 90.1 | 5.0 ± 0.3 | 1.1 ± 0.0 | 2.8 | 0.5 |
+ | [YOLO26s-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-cls.pt) | 224 | 76.0 | 92.9 | 7.9 ± 0.2 | 1.3 ± 0.0 | 6.7 | 1.6 |
+ | [YOLO26m-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-cls.pt) | 224 | 78.1 | 94.2 | 17.2 ± 0.4 | 2.0 ± 0.0 | 11.6 | 4.9 |
+ | [YOLO26l-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-cls.pt) | 224 | 79.0 | 94.6 | 23.2 ± 0.3 | 2.8 ± 0.0 | 14.1 | 6.2 |
+ | [YOLO26x-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-cls.pt) | 224 | 79.9 | 95.0 | 41.4 ± 0.9 | 3.8 ± 0.0 | 29.6 | 13.6 |

  - **acc** values represent model accuracy on the [ImageNet](https://www.image-net.org/) dataset validation set. <br>Reproduce with `yolo val classify data=path/to/ImageNet device=0`
  - **Speed** metrics are averaged over ImageNet val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu`
@@ -269,13 +269,13 @@ Consult the [Classification Docs](https://docs.ultralytics.com/tasks/classify/)

  See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples. These models are trained on [COCO-Pose](https://docs.ultralytics.com/datasets/pose/coco/), focusing on the 'person' class.

- | Model | size<br><sup>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
- | ---------------------------------------------------------------------------------------------- | --------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
- | [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640 | 56.9 | 83.0 | 40.3 ± 0.5 | 1.8 ± 0.0 | 2.9 | 7.5 |
- | [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640 | 63.1 | 86.8 | 85.3 ± 0.9 | 2.7 ± 0.0 | 10.4 | 23.9 |
- | [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640 | 68.8 | 89.9 | 218.0 ± 1.5 | 5.0 ± 0.1 | 21.5 | 73.1 |
- | [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640 | 70.4 | 90.8 | 275.4 ± 2.4 | 6.5 ± 0.1 | 25.9 | 91.3 |
- | [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640 | 71.7 | 91.6 | 565.4 ± 3.0 | 12.2 ± 0.2 | 57.6 | 201.7 |
+ | Model | size<br><sup>(pixels)</sup> | mAP<sup>pose<br>50-95(e2e)</sup> | mAP<sup>pose<br>50(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+ | ---------------------------------------------------------------------------------------------- | --------------------------- | -------------------------------- | ----------------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ----------------------- |
+ | [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640 | 57.2 | 83.3 | 40.3 ± 0.5 | 1.8 ± 0.0 | 2.9 | 7.5 |
+ | [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640 | 63.0 | 86.6 | 85.3 ± 0.9 | 2.7 ± 0.0 | 10.4 | 23.9 |
+ | [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640 | 68.8 | 89.6 | 218.0 ± 1.5 | 5.0 ± 0.1 | 21.5 | 73.1 |
+ | [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640 | 70.4 | 90.5 | 275.4 ± 2.4 | 6.5 ± 0.1 | 25.9 | 91.3 |
+ | [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640 | 71.6 | 91.6 | 565.4 ± 3.0 | 12.2 ± 0.2 | 57.6 | 201.7 |

  - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO Keypoints val2017](https://docs.ultralytics.com/datasets/pose/coco/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val pose data=coco-pose.yaml device=0`
  - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu`
@@ -286,13 +286,13 @@ See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usa

  Check the [OBB Docs](https://docs.ultralytics.com/tasks/obb/) for usage examples. These models are trained on [DOTAv1](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10/), including 15 classes.

- | Model | size<br><sup>(pixels) | mAP<sup>test<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
- | --- | --- | --- | --- | --- | --- | --- |
- | [YOLO26n-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-obb.pt) | 1024 | 78.9 | 97.7 ± 0.9 | 2.8 ± 0.0 | 2.5 | 14.0 |
- | [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024 | 79.8 | 218.0 ± 1.4 | 4.9 ± 0.1 | 9.8 | 55.1 |
- | [YOLO26m-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-obb.pt) | 1024 | 81.0 | 579.2 ± 3.8 | 10.2 ± 0.3 | 21.2 | 183.3 |
- | [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024 | 81.4 | 735.6 ± 3.1 | 13.0 ± 0.2 | 25.6 | 230.0 |
- | [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024 | 82.1 | 1485.7 ± 11.5 | 30.5 ± 0.9 | 57.6 | 516.5 |
+ | Model | size<br><sup>(pixels)</sup> | mAP<sup>test<br>50-95(e2e)</sup> | mAP<sup>test<br>50(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+ | --- | --- | --- | --- | --- | --- | --- | --- |
+ | [YOLO26n-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-obb.pt) | 1024 | 52.4 | 78.9 | 97.7 ± 0.9 | 2.8 ± 0.0 | 2.5 | 14.0 |
+ | [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024 | 54.8 | 80.9 | 218.0 ± 1.4 | 4.9 ± 0.1 | 9.8 | 55.1 |
+ | [YOLO26m-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-obb.pt) | 1024 | 55.3 | 81.0 | 579.2 ± 3.8 | 10.2 ± 0.3 | 21.2 | 183.3 |
+ | [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024 | 56.2 | 81.6 | 735.6 ± 3.1 | 13.0 ± 0.2 | 25.6 | 230.0 |
+ | [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024 | 56.7 | 81.7 | 1485.7 ± 11.5 | 30.5 ± 0.9 | 57.6 | 516.5 |

  - **mAP<sup>test</sup>** values are for single-model multiscale performance on the [DOTAv1 test set](https://captain-whu.github.io/DOTA/dataset.html). <br>Reproduce by `yolo val obb data=DOTAv1.yaml device=0 split=test` and submit merged results to the [DOTA evaluation server](https://captain-whu.github.io/DOTA/evaluation.html).
  - **Speed** metrics are averaged over [DOTAv1 val images](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10) using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce by `yolo val obb data=DOTAv1.yaml batch=1 device=0|cpu`
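
For reference, the `yolo val obb` command in the notes above maps onto the Python API roughly as follows. This is a minimal sketch, assuming the `yolo26n-obb.pt` weights auto-download and that `DOTAv1.yaml` resolves to a locally available DOTAv1 dataset; it is not taken from this diff.

```python
from ultralytics import YOLO

# Sketch of the OBB validation command from the notes above.
model = YOLO("yolo26n-obb.pt")  # OBB checkpoint named in the table; downloads on first use
metrics = model.val(data="DOTAv1.yaml", imgsz=1024, batch=1, device=0)
print(metrics)  # summary of the OBB validation metrics
```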
@@ -129,13 +129,13 @@ Ultralytics supports a wide range of YOLO models, from early versions like [YOLO

  Explore the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples. These models are trained on the [COCO dataset](https://cocodataset.org/), featuring 80 object classes.

- | Model | size<br><sup>(pixels) | mAP<sup>val<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
- | --- | --- | --- | --- | --- | --- | --- |
- | [YOLO26n](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n.pt) | 640 | 40.9 | 38.9 ± 0.7 | 1.7 ± 0.0 | 2.4 | 5.4 |
- | [YOLO26s](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s.pt) | 640 | 48.6 | 87.2 ± 0.9 | 2.5 ± 0.0 | 9.5 | 20.7 |
- | [YOLO26m](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m.pt) | 640 | 53.1 | 220.0 ± 1.4 | 4.7 ± 0.1 | 20.4 | 68.2 |
- | [YOLO26l](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l.pt) | 640 | 55.0 | 286.2 ± 2.0 | 6.2 ± 0.2 | 24.8 | 86.4 |
- | [YOLO26x](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x.pt) | 640 | 57.5 | 525.8 ± 4.0 | 11.8 ± 0.2 | 55.7 | 193.9 |
+ | Model | size<br><sup>(pixels)</sup> | mAP<sup>val<br>50-95</sup> | mAP<sup>val<br>50-95(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+ | --- | --- | --- | --- | --- | --- | --- | --- |
+ | [YOLO26n](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n.pt) | 640 | 40.9 | 40.1 | 38.9 ± 0.7 | 1.7 ± 0.0 | 2.4 | 5.4 |
+ | [YOLO26s](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s.pt) | 640 | 48.6 | 47.8 | 87.2 ± 0.9 | 2.5 ± 0.0 | 9.5 | 20.7 |
+ | [YOLO26m](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m.pt) | 640 | 53.1 | 52.5 | 220.0 ± 1.4 | 4.7 ± 0.1 | 20.4 | 68.2 |
+ | [YOLO26l](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l.pt) | 640 | 55.0 | 54.4 | 286.2 ± 2.0 | 6.2 ± 0.2 | 24.8 | 86.4 |
+ | [YOLO26x](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x.pt) | 640 | 57.5 | 56.9 | 525.8 ± 4.0 | 11.8 ± 0.2 | 55.7 | 193.9 |

  - **mAP<sup>val</sup>** values refer to single-model single-scale performance on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val detect data=coco.yaml device=0`
  - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val detect data=coco.yaml batch=1 device=0|cpu`
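
The same validation can be driven from Python. A minimal sketch, assuming the `yolo26n.pt` checkpoint auto-downloads; `coco8.yaml` is used here as a tiny stand-in so the snippet runs quickly, while the table numbers above come from the full `coco.yaml`.

```python
from ultralytics import YOLO

model = YOLO("yolo26n.pt")  # detection checkpoint from the table
metrics = model.val(data="coco8.yaml", imgsz=640, batch=1, device="cpu")
print(metrics.box.map)    # mAP50-95
print(metrics.box.map50)  # mAP50
```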
@@ -146,13 +146,13 @@ Explore the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usa

  Refer to the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage examples. These models are trained on [COCO-Seg](https://docs.ultralytics.com/datasets/segment/coco/), including 80 classes.

- | Model | size<br><sup>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mask<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
- | --- | --- | --- | --- | --- | --- | --- | --- |
- | [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640 | 39.6 | 33.9 | 53.3 ± 0.5 | 2.1 ± 0.0 | 2.8 | 9.1 |
- | [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640 | 47.3 | 40.0 | 118.4 ± 0.9 | 3.3 ± 0.0 | 10.7 | 34.2 |
- | [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640 | 52.5 | 44.1 | 328.2 ± 2.4 | 6.7 ± 0.1 | 24.8 | 121.5 |
- | [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640 | 54.4 | 45.5 | 387.0 ± 3.7 | 8.0 ± 0.1 | 29.2 | 139.8 |
- | [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640 | 56.5 | 47.0 | 787.0 ± 6.8 | 16.4 ± 0.1 | 65.5 | 313.5 |
+ | Model | size<br><sup>(pixels)</sup> | mAP<sup>box<br>50-95(e2e)</sup> | mAP<sup>mask<br>50-95(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+ | --- | --- | --- | --- | --- | --- | --- | --- |
+ | [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640 | 39.6 | 33.9 | 53.3 ± 0.5 | 2.1 ± 0.0 | 2.7 | 9.1 |
+ | [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640 | 47.3 | 40.0 | 118.4 ± 0.9 | 3.3 ± 0.0 | 10.4 | 34.2 |
+ | [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640 | 52.5 | 44.1 | 328.2 ± 2.4 | 6.7 ± 0.1 | 23.6 | 121.5 |
+ | [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640 | 54.4 | 45.5 | 387.0 ± 3.7 | 8.0 ± 0.1 | 28.0 | 139.8 |
+ | [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640 | 56.5 | 47.0 | 787.0 ± 6.8 | 16.4 ± 0.1 | 62.8 | 313.5 |

  - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val segment data=coco.yaml device=0`
  - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val segment data=coco.yaml batch=1 device=0|cpu`
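
For the segmentation checkpoints, the mask output is exposed on the results object. A short sketch; the sample image URL is an illustrative Ultralytics asset, not something referenced in this diff.

```python
from ultralytics import YOLO

model = YOLO("yolo26n-seg.pt")  # segmentation checkpoint from the table
results = model("https://ultralytics.com/images/bus.jpg", imgsz=640)
r = results[0]
print(r.boxes.xyxy.shape)      # (num_instances, 4) bounding boxes
if r.masks is not None:
    print(r.masks.data.shape)  # (num_instances, H, W) instance masks
```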
@@ -163,13 +163,13 @@ Refer to the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) fo

  Consult the [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usage examples. These models are trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), covering 1000 classes.

- | Model | size<br><sup>(pixels) | acc<br><sup>top1 | acc<br><sup>top5 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) at 224 |
- | --- | --- | --- | --- | --- | --- | --- | --- |
- | [YOLO26n-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-cls.pt) | 224 | 71.4 | 90.1 | 5.0 ± 0.3 | 1.1 ± 0.0 | 2.8 | 0.5 |
- | [YOLO26s-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-cls.pt) | 224 | 76.0 | 92.9 | 7.9 ± 0.2 | 1.3 ± 0.0 | 6.7 | 1.6 |
- | [YOLO26m-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-cls.pt) | 224 | 78.1 | 94.2 | 17.2 ± 0.4 | 2.0 ± 0.0 | 11.6 | 4.9 |
- | [YOLO26l-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-cls.pt) | 224 | 79.0 | 94.6 | 23.2 ± 0.3 | 2.8 ± 0.0 | 14.1 | 6.2 |
- | [YOLO26x-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-cls.pt) | 224 | 79.9 | 95.0 | 41.4 ± 0.9 | 3.8 ± 0.0 | 29.6 | 13.6 |
+ | Model | size<br><sup>(pixels)</sup> | acc<br><sup>top1</sup> | acc<br><sup>top5</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B) at 224</sup> |
+ | --- | --- | --- | --- | --- | --- | --- | --- |
+ | [YOLO26n-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-cls.pt) | 224 | 71.4 | 90.1 | 5.0 ± 0.3 | 1.1 ± 0.0 | 2.8 | 0.5 |
+ | [YOLO26s-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-cls.pt) | 224 | 76.0 | 92.9 | 7.9 ± 0.2 | 1.3 ± 0.0 | 6.7 | 1.6 |
+ | [YOLO26m-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-cls.pt) | 224 | 78.1 | 94.2 | 17.2 ± 0.4 | 2.0 ± 0.0 | 11.6 | 4.9 |
+ | [YOLO26l-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-cls.pt) | 224 | 79.0 | 94.6 | 23.2 ± 0.3 | 2.8 ± 0.0 | 14.1 | 6.2 |
+ | [YOLO26x-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-cls.pt) | 224 | 79.9 | 95.0 | 41.4 ± 0.9 | 3.8 ± 0.0 | 29.6 | 13.6 |

  - **acc** values represent model accuracy on the [ImageNet](https://www.image-net.org/) dataset validation set. <br>Reproduce with `yolo val classify data=path/to/ImageNet device=0`
  - **Speed** metrics are averaged over ImageNet val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu`
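
Classification results carry the top-1/top-5 predictions directly. A minimal sketch; the sample image URL is illustrative and not part of this diff.

```python
from ultralytics import YOLO

model = YOLO("yolo26n-cls.pt")  # classification checkpoint from the table
results = model("https://ultralytics.com/images/bus.jpg", imgsz=224)
probs = results[0].probs
print(probs.top1, float(probs.top1conf))  # top-1 class index and confidence
print(probs.top5)                         # top-5 class indices
```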
@@ -180,13 +180,13 @@ Consult the [Classification Docs](https://docs.ultralytics.com/tasks/classify/)

  See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples. These models are trained on [COCO-Pose](https://docs.ultralytics.com/datasets/pose/coco/), focusing on the 'person' class.

- | Model | size<br><sup>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
- | --- | --- | --- | --- | --- | --- | --- | --- |
- | [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640 | 56.9 | 83.0 | 40.3 ± 0.5 | 1.8 ± 0.0 | 2.9 | 7.5 |
- | [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640 | 63.1 | 86.8 | 85.3 ± 0.9 | 2.7 ± 0.0 | 10.4 | 23.9 |
- | [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640 | 68.8 | 89.9 | 218.0 ± 1.5 | 5.0 ± 0.1 | 21.5 | 73.1 |
- | [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640 | 70.4 | 90.8 | 275.4 ± 2.4 | 6.5 ± 0.1 | 25.9 | 91.3 |
- | [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640 | 71.7 | 91.6 | 565.4 ± 3.0 | 12.2 ± 0.2 | 57.6 | 201.7 |
+ | Model | size<br><sup>(pixels)</sup> | mAP<sup>pose<br>50-95(e2e)</sup> | mAP<sup>pose<br>50(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+ | --- | --- | --- | --- | --- | --- | --- | --- |
+ | [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640 | 57.2 | 83.3 | 40.3 ± 0.5 | 1.8 ± 0.0 | 2.9 | 7.5 |
+ | [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640 | 63.0 | 86.6 | 85.3 ± 0.9 | 2.7 ± 0.0 | 10.4 | 23.9 |
+ | [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640 | 68.8 | 89.6 | 218.0 ± 1.5 | 5.0 ± 0.1 | 21.5 | 73.1 |
+ | [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640 | 70.4 | 90.5 | 275.4 ± 2.4 | 6.5 ± 0.1 | 25.9 | 91.3 |
+ | [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640 | 71.6 | 91.6 | 565.4 ± 3.0 | 12.2 ± 0.2 | 57.6 | 201.7 |

  - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO Keypoints val2017](https://docs.ultralytics.com/datasets/pose/coco/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val pose data=coco-pose.yaml device=0`
  - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu`
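
Pose checkpoints return per-person keypoints on the results object. A short sketch; the sample image URL is illustrative.

```python
from ultralytics import YOLO

model = YOLO("yolo26n-pose.pt")  # pose checkpoint from the table
results = model("https://ultralytics.com/images/bus.jpg", imgsz=640)
kpts = results[0].keypoints
if kpts is not None:
    print(kpts.xy.shape)  # (num_persons, num_keypoints, 2) pixel coordinates
    print(kpts.conf)      # per-keypoint confidence, when available
```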
@@ -197,13 +197,13 @@ See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usa

  Check the [OBB Docs](https://docs.ultralytics.com/tasks/obb/) for usage examples. These models are trained on [DOTAv1](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10/), including 15 classes.

- | Model | size<br><sup>(pixels) | mAP<sup>test<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
- | --- | --- | --- | --- | --- | --- | --- |
- | [YOLO26n-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-obb.pt) | 1024 | 78.9 | 97.7 ± 0.9 | 2.8 ± 0.0 | 2.5 | 14.0 |
- | [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024 | 79.8 | 218.0 ± 1.4 | 4.9 ± 0.1 | 9.8 | 55.1 |
- | [YOLO26m-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-obb.pt) | 1024 | 81.0 | 579.2 ± 3.8 | 10.2 ± 0.3 | 21.2 | 183.3 |
- | [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024 | 81.4 | 735.6 ± 3.1 | 13.0 ± 0.2 | 25.6 | 230.0 |
- | [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024 | 82.1 | 1485.7 ± 11.5 | 30.5 ± 0.9 | 57.6 | 516.5 |
+ | Model | size<br><sup>(pixels)</sup> | mAP<sup>test<br>50-95(e2e)</sup> | mAP<sup>test<br>50(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+ | --- | --- | --- | --- | --- | --- | --- | --- |
+ | [YOLO26n-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-obb.pt) | 1024 | 52.4 | 78.9 | 97.7 ± 0.9 | 2.8 ± 0.0 | 2.5 | 14.0 |
+ | [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024 | 54.8 | 80.9 | 218.0 ± 1.4 | 4.9 ± 0.1 | 9.8 | 55.1 |
+ | [YOLO26m-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-obb.pt) | 1024 | 55.3 | 81.0 | 579.2 ± 3.8 | 10.2 ± 0.3 | 21.2 | 183.3 |
+ | [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024 | 56.2 | 81.6 | 735.6 ± 3.1 | 13.0 ± 0.2 | 25.6 | 230.0 |
+ | [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024 | 56.7 | 81.7 | 1485.7 ± 11.5 | 30.5 ± 0.9 | 57.6 | 516.5 |

  - **mAP<sup>test</sup>** values are for single-model multiscale performance on the [DOTAv1 test set](https://captain-whu.github.io/DOTA/dataset.html). <br>Reproduce by `yolo val obb data=DOTAv1.yaml device=0 split=test` and submit merged results to the [DOTA evaluation server](https://captain-whu.github.io/DOTA/evaluation.html).
  - **Speed** metrics are averaged over [DOTAv1 val images](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10) using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce by `yolo val obb data=DOTAv1.yaml batch=1 device=0|cpu`
@@ -5,7 +5,7 @@ from unittest import mock

  import torch

- from tests import MODEL
+ from tests import MODEL, SOURCE
  from ultralytics import YOLO
  from ultralytics.cfg import get_cfg
  from ultralytics.engine.exporter import Exporter
@@ -24,7 +24,7 @@ def test_export():
      exporter.add_callback("on_export_start", test_func)
      assert test_func in exporter.callbacks["on_export_start"], "callback test failed"
      f = exporter(model=YOLO("yolo26n.yaml").model)
-     YOLO(f)(ASSETS)  # exported model inference
+     YOLO(f)(SOURCE)  # exported model inference


  def test_detect():
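
Outside the test harness, the same round trip (export a model, then run the exported artifact) looks roughly like this through the public API. `SOURCE` in the test suite points at a bundled test image, so a generic Ultralytics sample URL stands in for it here.

```python
from ultralytics import YOLO

model = YOLO("yolo26n.pt")  # any detection checkpoint works for this sketch
exported = model.export(format="onnx", imgsz=640)  # returns the path of the exported file
YOLO(exported)("https://ultralytics.com/images/bus.jpg")  # inference with the exported model
```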
@@ -144,7 +144,9 @@ def test_export_coreml_matrix(task, dynamic, int8, half, nms, batch):


  @pytest.mark.slow
- @pytest.mark.skipif(not checks.IS_PYTHON_MINIMUM_3_10, reason="TFLite export requires Python>=3.10")
+ @pytest.mark.skipif(
+     not checks.IS_PYTHON_MINIMUM_3_10 or not TORCH_1_13, reason="TFLite export requires Python>=3.10 and torch>=1.13"
+ )
  @pytest.mark.skipif(
      not LINUX or IS_RASPBERRYPI,
      reason="Test disabled as TF suffers from install conflicts on Windows, macOS and Raspberry Pi",
@@ -238,7 +240,6 @@ def test_export_mnn_matrix(task, int8, half, batch):


  @pytest.mark.slow
- @pytest.mark.skipif(ARM64, reason="NCNN not supported on ARM64")  # https://github.com/Tencent/ncnn/issues/6509
  @pytest.mark.skipif(not TORCH_2_0, reason="NCNN inference causes segfault on PyTorch<2.0")
  def test_export_ncnn():
      """Test YOLO export to NCNN format."""
@@ -247,7 +248,6 @@ def test_export_ncnn():


  @pytest.mark.slow
- @pytest.mark.skipif(ARM64, reason="NCNN not supported on ARM64")  # https://github.com/Tencent/ncnn/issues/6509
  @pytest.mark.skipif(not TORCH_2_0, reason="NCNN inference causes segfault on PyTorch<2.0")
  @pytest.mark.parametrize("task, half, batch", list(product(TASKS, [True, False], [1])))
  def test_export_ncnn_matrix(task, half, batch):
@@ -1,6 +1,6 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

- __version__ = "8.4.0"
+ __version__ = "8.4.3"

  import importlib
  import os
@@ -90,13 +90,13 @@ SOLUTIONS_HELP_MSG = f"""
          yolo solutions count source="path/to/video.mp4" region="[(20, 400), (1080, 400), (1080, 360), (20, 360)]"

      2. Call heatmap solution
-         yolo solutions heatmap colormap=cv2.COLORMAP_PARULA model=yolo11n.pt
+         yolo solutions heatmap colormap=cv2.COLORMAP_PARULA model=yolo26n.pt

      3. Call queue management solution
-         yolo solutions queue region="[(20, 400), (1080, 400), (1080, 360), (20, 360)]" model=yolo11n.pt
+         yolo solutions queue region="[(20, 400), (1080, 400), (1080, 360), (20, 360)]" model=yolo26n.pt

      4. Call workout monitoring solution for push-ups
-         yolo solutions workout model=yolo11n-pose.pt kpts=[6, 8, 10]
+         yolo solutions workout model=yolo26n-pose.pt kpts=[6, 8, 10]

      5. Generate analytical graphs
          yolo solutions analytics analytics_type="pie"
@@ -118,16 +118,16 @@ CLI_HELP_MSG = f"""
      See all ARGS at https://docs.ultralytics.com/usage/cfg or with 'yolo cfg'

      1. Train a detection model for 10 epochs with an initial learning_rate of 0.01
-         yolo train data=coco8.yaml model=yolo11n.pt epochs=10 lr0=0.01
+         yolo train data=coco8.yaml model=yolo26n.pt epochs=10 lr0=0.01

      2. Predict a YouTube video using a pretrained segmentation model at image size 320:
-         yolo predict model=yolo11n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320
+         yolo predict model=yolo26n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320

      3. Validate a pretrained detection model at batch-size 1 and image size 640:
-         yolo val model=yolo11n.pt data=coco8.yaml batch=1 imgsz=640
+         yolo val model=yolo26n.pt data=coco8.yaml batch=1 imgsz=640

-     4. Export a YOLO11n classification model to ONNX format at image size 224 by 128 (no TASK required)
-         yolo export model=yolo11n-cls.pt format=onnx imgsz=224,128
+     4. Export a YOLO26n classification model to ONNX format at image size 224 by 128 (no TASK required)
+         yolo export model=yolo26n-cls.pt format=onnx imgsz=224,128

      5. Ultralytics solutions usage
          yolo solutions count or any of {list(SOLUTION_MAP.keys())[1:-1]} source="path/to/video.mp4"
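
The CLI examples above have straightforward Python equivalents. A rough sketch follows, kept small with `coco8.yaml`; note that example 2 in the help text uses a `-seg` model, and YouTube sources additionally need a downloader backend installed.

```python
from ultralytics import YOLO

model = YOLO("yolo26n.pt")
model.train(data="coco8.yaml", epochs=10, lr0=0.01)  # 1. train
YOLO("yolo26n-seg.pt").predict("https://youtu.be/LNwODJXcvt4", imgsz=320)  # 2. predict a video
model.val(data="coco8.yaml", batch=1, imgsz=640)  # 3. validate
YOLO("yolo26n-cls.pt").export(format="onnx", imgsz=[224, 128])  # 4. export at 224x128
```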
@@ -305,8 +305,6 @@ def get_cfg(
      # Merge overrides
      if overrides:
          overrides = cfg2dict(overrides)
-         if "save_dir" not in cfg:
-             overrides.pop("save_dir", None)  # special override keys to ignore
          check_dict_alignment(cfg, overrides)
          cfg = {**cfg, **overrides}  # merge cfg and overrides dicts (prefer overrides)

@@ -494,7 +492,7 @@
      base_keys, custom_keys = (frozenset(x.keys()) for x in (base, custom))
      # Allow 'augmentations' as a valid custom parameter for custom Albumentations transforms
      if allowed_custom_keys is None:
-         allowed_custom_keys = {"augmentations"}
+         allowed_custom_keys = {"augmentations", "save_dir"}
      if mismatched := [k for k in custom_keys if k not in base_keys and k not in allowed_custom_keys]:
          from difflib import get_close_matches
@@ -606,7 +604,7 @@ handle_yolo_settings(args: list[str]) -> None:

      Examples:
          >>> handle_yolo_settings(["reset"])  # Reset YOLO settings
-         >>> handle_yolo_settings(["default_cfg_path=yolo11n.yaml"])  # Update a specific setting
+         >>> handle_yolo_settings(["default_cfg_path=yolo26n.yaml"])  # Update a specific setting

      Notes:
          - If no arguments are provided, the function will display the current settings.
@@ -651,7 +649,7 @@ handle_yolo_solutions(args: list[str]) -> None:
          >>> handle_yolo_solutions(["analytics", "conf=0.25", "source=path/to/video.mp4"])

          Run inference with custom configuration, requires Streamlit version 1.29.0 or higher.
-         >>> handle_yolo_solutions(["inference", "model=yolo11n.pt"])
+         >>> handle_yolo_solutions(["inference", "model=yolo26n.pt"])

      Notes:
          - Arguments can be provided in the format 'key=value' or as boolean flags
@@ -709,7 +707,7 @@ handle_yolo_solutions(args: list[str]) -> None:
                  str(ROOT / "solutions/streamlit_inference.py"),
                  "--server.headless",
                  "true",
-                 overrides.pop("model", "yolo11n.pt"),
+                 overrides.pop("model", "yolo26n.pt"),
              ]
          )
      else:
@@ -760,9 +758,9 @@ parse_key_value_pair(pair: str = "key=value") -> tuple:
          AssertionError: If the value is missing or empty.

      Examples:
-         >>> key, value = parse_key_value_pair("model=yolo11n.pt")
+         >>> key, value = parse_key_value_pair("model=yolo26n.pt")
          >>> print(f"Key: {key}, Value: {value}")
-         Key: model, Value: yolo11n.pt
+         Key: model, Value: yolo26n.pt

          >>> key, value = parse_key_value_pair("epochs=100")
          >>> print(f"Key: {key}, Value: {value}")
@@ -834,13 +832,13 @@ def entrypoint(debug: str = "") -> None:

      Examples:
          Train a detection model for 10 epochs with an initial learning_rate of 0.01:
-         >>> entrypoint("train data=coco8.yaml model=yolo11n.pt epochs=10 lr0=0.01")
+         >>> entrypoint("train data=coco8.yaml model=yolo26n.pt epochs=10 lr0=0.01")

          Predict a YouTube video using a pretrained segmentation model at image size 320:
-         >>> entrypoint("predict model=yolo11n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320")
+         >>> entrypoint("predict model=yolo26n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320")

          Validate a pretrained detection model at batch-size 1 and image size 640:
-         >>> entrypoint("val model=yolo11n.pt data=coco8.yaml batch=1 imgsz=640")
+         >>> entrypoint("val model=yolo26n.pt data=coco8.yaml batch=1 imgsz=640")

      Notes:
          - If no arguments are passed, the function will display the usage help message.
@@ -935,7 +933,7 @@ def entrypoint(debug: str = "") -> None:
      # Model
      model = overrides.pop("model", DEFAULT_CFG.model)
      if model is None:
-         model = "yolo11n.pt"
+         model = "yolo26n.pt"
          LOGGER.warning(f"'model' argument is missing. Using default 'model={model}'.")
      overrides["model"] = model
      stem = Path(model).stem.lower()
@@ -1024,5 +1022,5 @@ def copy_default_cfg() -> None:


  if __name__ == "__main__":
-     # Example: entrypoint(debug='yolo predict model=yolo11n.pt')
+     # Example: entrypoint(debug='yolo predict model=yolo26n.pt')
      entrypoint(debug="")
@@ -1,12 +1,12 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

- # Ultralytics YOLO26 object detection model with P3/8 - P5/32 outputs
+ # Ultralytics YOLO26-cls image classification model
  # Model docs: https://docs.ultralytics.com/models/yolo26
  # Task docs: https://docs.ultralytics.com/tasks/classify

  # Parameters
  nc: 1000 # number of classes
- scales: # model compound scaling constants, i.e. 'model=YOLO26n.yaml' will call YOLO26.yaml with scale 'n'
+ scales: # model compound scaling constants, i.e. 'model=yolo26n-cls.yaml' will call yolo26-cls.yaml with scale 'n'
    # [depth, width, max_channels]
    n: [0.50, 0.25, 1024] # summary: 86 layers, 2,812,104 parameters, 2,812,104 gradients, 0.5 GFLOPs
    s: [0.50, 0.50, 1024] # summary: 86 layers, 6,724,008 parameters, 6,724,008 gradients, 1.6 GFLOPs
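
The corrected `scales:` comment describes how the scale letter in the YAML filename selects a `[depth, width, max_channels]` row. A quick sketch of that behavior, assuming the yolo26 configs resolve scales from the filename the same way earlier Ultralytics YAMLs do; models built from a YAML start untrained.

```python
from ultralytics import YOLO

nano = YOLO("yolo26n-cls.yaml")   # 'n' row: depth 0.50, width 0.25
small = YOLO("yolo26s-cls.yaml")  # 's' row: depth 0.50, width 0.50
nano.info()                       # layer/parameter summary, comparable to the YAML comments
```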
@@ -1,6 +1,6 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

- # Ultralytics YOLO26 object detection model with P3/8 - P5/32 outputs
+ # Ultralytics YOLO26-obb Oriented Bounding Boxes (OBB) model with P3/8 - P5/32 outputs
  # Model docs: https://docs.ultralytics.com/models/yolo26
  # Task docs: https://docs.ultralytics.com/tasks/obb

@@ -8,7 +8,7 @@
  nc: 80 # number of classes
  end2end: True # whether to use end-to-end mode
  reg_max: 1 # DFL bins
- scales: # model compound scaling constants, i.e. 'model=YOLO26n.yaml' will call YOLO26.yaml with scale 'n'
+ scales: # model compound scaling constants, i.e. 'model=yolo26n-obb.yaml' will call yolo26-obb.yaml with scale 'n'
    # [depth, width, max_channels]
    n: [0.50, 0.25, 1024] # summary: 291 layers, 2,715,614 parameters, 2,715,614 gradients, 16.9 GFLOPs
    s: [0.50, 0.50, 1024] # summary: 291 layers, 10,582,142 parameters, 10,582,142 gradients, 63.5 GFLOPs
@@ -1,6 +1,6 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

- # Ultralytics YOLO26 object detection model with P3/8 - P5/32 outputs
+ # Ultralytics YOLO26 object detection model with P2/4 - P5/32 outputs
  # Model docs: https://docs.ultralytics.com/models/yolo26
  # Task docs: https://docs.ultralytics.com/tasks/detect

@@ -8,13 +8,13 @@
  nc: 80 # number of classes
  end2end: True # whether to use end-to-end mode
  reg_max: 1 # DFL bins
- scales: # model compound scaling constants, i.e. 'model=YOLO26n.yaml' will call YOLO26.yaml with scale 'n'
+ scales: # model compound scaling constants, i.e. 'model=yolo26n-p2.yaml' will call yolo26-p2.yaml with scale 'n'
    # [depth, width, max_channels]
-   n: [0.50, 0.25, 1024] # summary: 181 layers, 2624080 parameters, 2624064 gradients, 6.6 GFLOPs
-   s: [0.50, 0.50, 1024] # summary: 181 layers, 9458752 parameters, 9458736 gradients, 21.7 GFLOPs
-   m: [0.50, 1.00, 512] # summary: 231 layers, 20114688 parameters, 20114672 gradients, 68.5 GFLOPs
-   l: [1.00, 1.00, 512] # summary: 357 layers, 25372160 parameters, 25372144 gradients, 87.6 GFLOPs
-   x: [1.00, 1.50, 512] # summary: 357 layers, 56966176 parameters, 56966160 gradients, 196.0 GFLOPs
+   n: [0.50, 0.25, 1024] # summary: 329 layers, 2,662,400 parameters, 2,662,400 gradients, 9.5 GFLOPs
+   s: [0.50, 0.50, 1024] # summary: 329 layers, 9,765,856 parameters, 9,765,856 gradients, 27.8 GFLOPs
+   m: [0.50, 1.00, 512] # summary: 349 layers, 21,144,288 parameters, 21,144,288 gradients, 91.4 GFLOPs
+   l: [1.00, 1.00, 512] # summary: 489 layers, 25,815,520 parameters, 25,815,520 gradients, 115.3 GFLOPs
+   x: [1.00, 1.50, 512] # summary: 489 layers, 57,935,232 parameters, 57,935,232 gradients, 256.9 GFLOPs

  # YOLO26n backbone
  backbone:
@@ -42,12 +42,12 @@ head:
  - [-1, 2, C3k2, [256, True]] # 16 (P3/8-small)

  - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
- - [[-1, 2], 1, Concat, [1]] # cat backbone P3
- - [-1, 2, C3k2, [128, True]] # 19 (P3/8-small)
+ - [[-1, 2], 1, Concat, [1]] # cat backbone P2
+ - [-1, 2, C3k2, [128, True]] # 19 (P2/4-xsmall)

  - [-1, 1, Conv, [128, 3, 2]]
  - [[-1, 16], 1, Concat, [1]] # cat head P3
- - [-1, 2, C3k2, [256, True]] # 22 (P4/16-medium)
+ - [-1, 2, C3k2, [256, True]] # 22 (P3/8-small)

  - [-1, 1, Conv, [256, 3, 2]]
  - [[-1, 13], 1, Concat, [1]] # cat head P4
@@ -57,4 +57,4 @@ head:
  - [[-1, 10], 1, Concat, [1]] # cat head P5
  - [-1, 1, C3k2, [1024, True, 0.5, True]] # 28 (P5/32-large)

- - [[19, 22, 25, 28], 1, Detect, [nc]] # Detect(P3, P4, P5)
+ - [[19, 22, 25, 28], 1, Detect, [nc]] # Detect(P2, P3, P4, P5)
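
The renamed head comments reflect that this variant adds a P2/4 output for very small objects on top of the usual P3-P5 levels. A minimal sketch of building it, assuming the config resolves like other Ultralytics YAMLs; weights are random until trained or loaded.

```python
from ultralytics import YOLO

model = YOLO("yolo26n-p2.yaml")  # P2/4-P5/32 detection heads at the 'n' scale
model.info()                      # prints the layer/parameter summary
# model.train(data="coco8.yaml", epochs=1)  # illustrative fine-tune call, commented out
```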