ultralytics 8.3.66__tar.gz → 8.3.68__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (249)
  1. {ultralytics-8.3.66/ultralytics.egg-info → ultralytics-8.3.68}/PKG-INFO +1 -1
  2. {ultralytics-8.3.66 → ultralytics-8.3.68}/tests/test_exports.py +41 -38
  3. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/__init__.py +1 -1
  4. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/engine/exporter.py +133 -20
  5. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/engine/results.py +4 -1
  6. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/nas/val.py +1 -7
  7. ultralytics-8.3.68/ultralytics/models/yolo/detect/predict.py +73 -0
  8. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/detect/val.py +4 -0
  9. ultralytics-8.3.68/ultralytics/models/yolo/obb/predict.py +46 -0
  10. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/obb/val.py +0 -14
  11. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/pose/predict.py +18 -25
  12. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/pose/val.py +0 -13
  13. ultralytics-8.3.68/ultralytics/models/yolo/segment/predict.py +74 -0
  14. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/segment/val.py +1 -10
  15. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/nn/autobackend.py +11 -4
  16. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/__init__.py +7 -2
  17. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/benchmarks.py +2 -2
  18. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/ops.py +22 -6
  19. {ultralytics-8.3.66 → ultralytics-8.3.68/ultralytics.egg-info}/PKG-INFO +1 -1
  20. ultralytics-8.3.66/ultralytics/models/yolo/detect/predict.py +0 -41
  21. ultralytics-8.3.66/ultralytics/models/yolo/obb/predict.py +0 -53
  22. ultralytics-8.3.66/ultralytics/models/yolo/segment/predict.py +0 -55
  23. {ultralytics-8.3.66 → ultralytics-8.3.68}/LICENSE +0 -0
  24. {ultralytics-8.3.66 → ultralytics-8.3.68}/README.md +0 -0
  25. {ultralytics-8.3.66 → ultralytics-8.3.68}/pyproject.toml +0 -0
  26. {ultralytics-8.3.66 → ultralytics-8.3.68}/setup.cfg +0 -0
  27. {ultralytics-8.3.66 → ultralytics-8.3.68}/tests/__init__.py +0 -0
  28. {ultralytics-8.3.66 → ultralytics-8.3.68}/tests/conftest.py +0 -0
  29. {ultralytics-8.3.66 → ultralytics-8.3.68}/tests/test_cli.py +0 -0
  30. {ultralytics-8.3.66 → ultralytics-8.3.68}/tests/test_cuda.py +0 -0
  31. {ultralytics-8.3.66 → ultralytics-8.3.68}/tests/test_engine.py +0 -0
  32. {ultralytics-8.3.66 → ultralytics-8.3.68}/tests/test_integrations.py +0 -0
  33. {ultralytics-8.3.66 → ultralytics-8.3.68}/tests/test_python.py +0 -0
  34. {ultralytics-8.3.66 → ultralytics-8.3.68}/tests/test_solutions.py +0 -0
  35. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/assets/bus.jpg +0 -0
  36. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/assets/zidane.jpg +0 -0
  37. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/__init__.py +0 -0
  38. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/Argoverse.yaml +0 -0
  39. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/DOTAv1.5.yaml +0 -0
  40. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/DOTAv1.yaml +0 -0
  41. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/GlobalWheat2020.yaml +0 -0
  42. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/ImageNet.yaml +0 -0
  43. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/Objects365.yaml +0 -0
  44. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/SKU-110K.yaml +0 -0
  45. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/VOC.yaml +0 -0
  46. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/VisDrone.yaml +0 -0
  47. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/african-wildlife.yaml +0 -0
  48. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/brain-tumor.yaml +0 -0
  49. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/carparts-seg.yaml +0 -0
  50. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/coco-pose.yaml +0 -0
  51. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/coco.yaml +0 -0
  52. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/coco128-seg.yaml +0 -0
  53. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/coco128.yaml +0 -0
  54. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/coco8-pose.yaml +0 -0
  55. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/coco8-seg.yaml +0 -0
  56. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/coco8.yaml +0 -0
  57. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/crack-seg.yaml +0 -0
  58. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/dog-pose.yaml +0 -0
  59. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/dota8.yaml +0 -0
  60. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/hand-keypoints.yaml +0 -0
  61. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/lvis.yaml +0 -0
  62. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/medical-pills.yaml +0 -0
  63. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/open-images-v7.yaml +0 -0
  64. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/package-seg.yaml +0 -0
  65. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/signature.yaml +0 -0
  66. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/tiger-pose.yaml +0 -0
  67. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/datasets/xView.yaml +0 -0
  68. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/default.yaml +0 -0
  69. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/11/yolo11-cls-resnet18.yaml +0 -0
  70. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/11/yolo11-cls.yaml +0 -0
  71. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/11/yolo11-obb.yaml +0 -0
  72. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/11/yolo11-pose.yaml +0 -0
  73. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/11/yolo11-seg.yaml +0 -0
  74. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/11/yolo11.yaml +0 -0
  75. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/rt-detr/rtdetr-l.yaml +0 -0
  76. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/rt-detr/rtdetr-resnet101.yaml +0 -0
  77. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/rt-detr/rtdetr-resnet50.yaml +0 -0
  78. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/rt-detr/rtdetr-x.yaml +0 -0
  79. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v10/yolov10b.yaml +0 -0
  80. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v10/yolov10l.yaml +0 -0
  81. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v10/yolov10m.yaml +0 -0
  82. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v10/yolov10n.yaml +0 -0
  83. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v10/yolov10s.yaml +0 -0
  84. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v10/yolov10x.yaml +0 -0
  85. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v3/yolov3-spp.yaml +0 -0
  86. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v3/yolov3-tiny.yaml +0 -0
  87. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v3/yolov3.yaml +0 -0
  88. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v5/yolov5-p6.yaml +0 -0
  89. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v5/yolov5.yaml +0 -0
  90. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v6/yolov6.yaml +0 -0
  91. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8-cls-resnet101.yaml +0 -0
  92. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8-cls-resnet50.yaml +0 -0
  93. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8-cls.yaml +0 -0
  94. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8-ghost-p2.yaml +0 -0
  95. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8-ghost-p6.yaml +0 -0
  96. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8-ghost.yaml +0 -0
  97. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8-obb.yaml +0 -0
  98. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8-p2.yaml +0 -0
  99. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8-p6.yaml +0 -0
  100. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8-pose-p6.yaml +0 -0
  101. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8-pose.yaml +0 -0
  102. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8-rtdetr.yaml +0 -0
  103. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8-seg-p6.yaml +0 -0
  104. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8-seg.yaml +0 -0
  105. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8-world.yaml +0 -0
  106. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8-worldv2.yaml +0 -0
  107. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v8/yolov8.yaml +0 -0
  108. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v9/yolov9c-seg.yaml +0 -0
  109. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v9/yolov9c.yaml +0 -0
  110. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v9/yolov9e-seg.yaml +0 -0
  111. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v9/yolov9e.yaml +0 -0
  112. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v9/yolov9m.yaml +0 -0
  113. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v9/yolov9s.yaml +0 -0
  114. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/models/v9/yolov9t.yaml +0 -0
  115. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/solutions/default.yaml +0 -0
  116. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/trackers/botsort.yaml +0 -0
  117. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/cfg/trackers/bytetrack.yaml +0 -0
  118. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/data/__init__.py +0 -0
  119. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/data/annotator.py +0 -0
  120. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/data/augment.py +0 -0
  121. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/data/base.py +0 -0
  122. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/data/build.py +0 -0
  123. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/data/converter.py +0 -0
  124. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/data/dataset.py +0 -0
  125. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/data/loaders.py +0 -0
  126. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/data/split_dota.py +0 -0
  127. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/data/utils.py +0 -0
  128. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/engine/__init__.py +0 -0
  129. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/engine/model.py +0 -0
  130. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/engine/predictor.py +0 -0
  131. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/engine/trainer.py +0 -0
  132. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/engine/tuner.py +0 -0
  133. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/engine/validator.py +0 -0
  134. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/hub/__init__.py +0 -0
  135. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/hub/auth.py +0 -0
  136. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/hub/google/__init__.py +0 -0
  137. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/hub/session.py +0 -0
  138. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/hub/utils.py +0 -0
  139. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/__init__.py +0 -0
  140. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/fastsam/__init__.py +0 -0
  141. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/fastsam/model.py +0 -0
  142. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/fastsam/predict.py +0 -0
  143. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/fastsam/utils.py +0 -0
  144. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/fastsam/val.py +0 -0
  145. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/nas/__init__.py +0 -0
  146. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/nas/model.py +0 -0
  147. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/nas/predict.py +0 -0
  148. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/rtdetr/__init__.py +0 -0
  149. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/rtdetr/model.py +0 -0
  150. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/rtdetr/predict.py +0 -0
  151. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/rtdetr/train.py +0 -0
  152. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/rtdetr/val.py +0 -0
  153. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/sam/__init__.py +0 -0
  154. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/sam/amg.py +0 -0
  155. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/sam/build.py +0 -0
  156. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/sam/model.py +0 -0
  157. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/sam/modules/__init__.py +0 -0
  158. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/sam/modules/blocks.py +0 -0
  159. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/sam/modules/decoders.py +0 -0
  160. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/sam/modules/encoders.py +0 -0
  161. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/sam/modules/memory_attention.py +0 -0
  162. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/sam/modules/sam.py +0 -0
  163. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/sam/modules/tiny_encoder.py +0 -0
  164. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/sam/modules/transformer.py +0 -0
  165. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/sam/modules/utils.py +0 -0
  166. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/sam/predict.py +0 -0
  167. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/utils/__init__.py +0 -0
  168. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/utils/loss.py +0 -0
  169. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/utils/ops.py +0 -0
  170. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/__init__.py +0 -0
  171. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/classify/__init__.py +0 -0
  172. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/classify/predict.py +0 -0
  173. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/classify/train.py +0 -0
  174. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/classify/val.py +0 -0
  175. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/detect/__init__.py +0 -0
  176. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/detect/train.py +0 -0
  177. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/model.py +0 -0
  178. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/obb/__init__.py +0 -0
  179. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/obb/train.py +0 -0
  180. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/pose/__init__.py +0 -0
  181. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/pose/train.py +0 -0
  182. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/segment/__init__.py +0 -0
  183. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/segment/train.py +0 -0
  184. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/world/__init__.py +0 -0
  185. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/world/train.py +0 -0
  186. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/models/yolo/world/train_world.py +0 -0
  187. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/nn/__init__.py +0 -0
  188. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/nn/modules/__init__.py +0 -0
  189. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/nn/modules/activation.py +0 -0
  190. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/nn/modules/block.py +0 -0
  191. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/nn/modules/conv.py +0 -0
  192. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/nn/modules/head.py +0 -0
  193. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/nn/modules/transformer.py +0 -0
  194. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/nn/modules/utils.py +0 -0
  195. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/nn/tasks.py +0 -0
  196. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/solutions/__init__.py +0 -0
  197. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/solutions/ai_gym.py +0 -0
  198. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/solutions/analytics.py +0 -0
  199. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/solutions/distance_calculation.py +0 -0
  200. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/solutions/heatmap.py +0 -0
  201. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/solutions/object_counter.py +0 -0
  202. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/solutions/parking_management.py +0 -0
  203. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/solutions/queue_management.py +0 -0
  204. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/solutions/region_counter.py +0 -0
  205. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/solutions/security_alarm.py +0 -0
  206. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/solutions/solutions.py +0 -0
  207. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/solutions/speed_estimation.py +0 -0
  208. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/solutions/streamlit_inference.py +0 -0
  209. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/solutions/trackzone.py +0 -0
  210. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/trackers/__init__.py +0 -0
  211. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/trackers/basetrack.py +0 -0
  212. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/trackers/bot_sort.py +0 -0
  213. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/trackers/byte_tracker.py +0 -0
  214. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/trackers/track.py +0 -0
  215. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/trackers/utils/__init__.py +0 -0
  216. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/trackers/utils/gmc.py +0 -0
  217. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/trackers/utils/kalman_filter.py +0 -0
  218. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/trackers/utils/matching.py +0 -0
  219. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/autobatch.py +0 -0
  220. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/callbacks/__init__.py +0 -0
  221. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/callbacks/base.py +0 -0
  222. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/callbacks/clearml.py +0 -0
  223. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/callbacks/comet.py +0 -0
  224. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/callbacks/dvc.py +0 -0
  225. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/callbacks/hub.py +0 -0
  226. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/callbacks/mlflow.py +0 -0
  227. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/callbacks/neptune.py +0 -0
  228. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/callbacks/raytune.py +0 -0
  229. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/callbacks/tensorboard.py +0 -0
  230. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/callbacks/wb.py +0 -0
  231. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/checks.py +0 -0
  232. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/dist.py +0 -0
  233. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/downloads.py +0 -0
  234. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/errors.py +0 -0
  235. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/files.py +0 -0
  236. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/instance.py +0 -0
  237. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/loss.py +0 -0
  238. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/metrics.py +0 -0
  239. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/patches.py +0 -0
  240. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/plotting.py +0 -0
  241. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/tal.py +0 -0
  242. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/torch_utils.py +0 -0
  243. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/triton.py +0 -0
  244. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics/utils/tuner.py +0 -0
  245. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics.egg-info/SOURCES.txt +0 -0
  246. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics.egg-info/dependency_links.txt +0 -0
  247. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics.egg-info/entry_points.txt +0 -0
  248. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics.egg-info/requires.txt +0 -0
  249. {ultralytics-8.3.66 → ultralytics-8.3.68}/ultralytics.egg-info/top_level.txt +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: ultralytics
- Version: 8.3.66
+ Version: 8.3.68
  Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
  Maintainer-email: Ultralytics <hello@ultralytics.com>

tests/test_exports.py
@@ -43,14 +43,16 @@ def test_export_openvino():
  @pytest.mark.slow
  @pytest.mark.skipif(not TORCH_1_13, reason="OpenVINO requires torch>=1.13")
  @pytest.mark.parametrize(
-     "task, dynamic, int8, half, batch",
-     [  # generate all combinations but exclude those where both int8 and half are True
-         (task, dynamic, int8, half, batch)
-         for task, dynamic, int8, half, batch in product(TASKS, [True, False], [True, False], [True, False], [1, 2])
-         if not (int8 and half)  # exclude cases where both int8 and half are True
+     "task, dynamic, int8, half, batch, nms",
+     [  # generate all combinations except for exclusion cases
+         (task, dynamic, int8, half, batch, nms)
+         for task, dynamic, int8, half, batch, nms in product(
+             TASKS, [True, False], [True, False], [True, False], [1, 2], [True, False]
+         )
+         if not ((int8 and half) or (task == "classify" and nms))
      ],
  )
- def test_export_openvino_matrix(task, dynamic, int8, half, batch):
+ def test_export_openvino_matrix(task, dynamic, int8, half, batch, nms):
      """Test YOLO model exports to OpenVINO under various configuration matrix conditions."""
      file = YOLO(TASK2MODEL[task]).export(
          format="openvino",
@@ -60,6 +62,7 @@ def test_export_openvino_matrix(task, dynamic, int8, half, batch):
          half=half,
          batch=batch,
          data=TASK2DATA[task],
+         nms=nms,
      )
      if WINDOWS:
          # Use unique filenames due to Windows file permissions bug possibly due to latent threaded use
@@ -72,36 +75,39 @@ def test_export_openvino_matrix(task, dynamic, int8, half, batch):

  @pytest.mark.slow
  @pytest.mark.parametrize(
-     "task, dynamic, int8, half, batch, simplify", product(TASKS, [True, False], [False], [False], [1, 2], [True, False])
+     "task, dynamic, int8, half, batch, simplify, nms",
+     [  # generate all combinations except for exclusion cases
+         (task, dynamic, int8, half, batch, simplify, nms)
+         for task, dynamic, int8, half, batch, simplify, nms in product(
+             TASKS, [True, False], [False], [False], [1, 2], [True, False], [True, False]
+         )
+         if not ((int8 and half) or (task == "classify" and nms) or (task == "obb" and nms and not TORCH_1_13))
+     ],
  )
- def test_export_onnx_matrix(task, dynamic, int8, half, batch, simplify):
+ def test_export_onnx_matrix(task, dynamic, int8, half, batch, simplify, nms):
      """Test YOLO exports to ONNX format with various configurations and parameters."""
      file = YOLO(TASK2MODEL[task]).export(
-         format="onnx",
-         imgsz=32,
-         dynamic=dynamic,
-         int8=int8,
-         half=half,
-         batch=batch,
-         simplify=simplify,
+         format="onnx", imgsz=32, dynamic=dynamic, int8=int8, half=half, batch=batch, simplify=simplify, nms=nms
      )
      YOLO(file)([SOURCE] * batch, imgsz=64 if dynamic else 32)  # exported model inference
      Path(file).unlink()  # cleanup


  @pytest.mark.slow
- @pytest.mark.parametrize("task, dynamic, int8, half, batch", product(TASKS, [False], [False], [False], [1, 2]))
- def test_export_torchscript_matrix(task, dynamic, int8, half, batch):
+ @pytest.mark.parametrize(
+     "task, dynamic, int8, half, batch, nms",
+     [  # generate all combinations except for exclusion cases
+         (task, dynamic, int8, half, batch, nms)
+         for task, dynamic, int8, half, batch, nms in product(TASKS, [False], [False], [False], [1, 2], [True, False])
+         if not (task == "classify" and nms)
+     ],
+ )
+ def test_export_torchscript_matrix(task, dynamic, int8, half, batch, nms):
      """Tests YOLO model exports to TorchScript format under varied configurations."""
      file = YOLO(TASK2MODEL[task]).export(
-         format="torchscript",
-         imgsz=32,
-         dynamic=dynamic,
-         int8=int8,
-         half=half,
-         batch=batch,
+         format="torchscript", imgsz=32, dynamic=dynamic, int8=int8, half=half, batch=batch, nms=nms
      )
-     YOLO(file)([SOURCE] * 3, imgsz=64 if dynamic else 32)  # exported model inference at batch=3
+     YOLO(file)([SOURCE] * batch, imgsz=64 if dynamic else 32)  # exported model inference
      Path(file).unlink()  # cleanup


@@ -111,10 +117,10 @@ def test_export_torchscript_matrix(task, dynamic, int8, half, batch):
  @pytest.mark.skipif(checks.IS_PYTHON_3_12, reason="CoreML not supported in Python 3.12")
  @pytest.mark.parametrize(
      "task, dynamic, int8, half, batch",
-     [  # generate all combinations but exclude those where both int8 and half are True
+     [  # generate all combinations except for exclusion cases
          (task, dynamic, int8, half, batch)
          for task, dynamic, int8, half, batch in product(TASKS, [False], [True, False], [True, False], [1])
-         if not (int8 and half)  # exclude cases where both int8 and half are True
+         if not (int8 and half)
      ],
  )
  def test_export_coreml_matrix(task, dynamic, int8, half, batch):
@@ -135,22 +141,19 @@ def test_export_coreml_matrix(task, dynamic, int8, half, batch):
  @pytest.mark.skipif(not checks.IS_PYTHON_MINIMUM_3_10, reason="TFLite export requires Python>=3.10")
  @pytest.mark.skipif(not LINUX, reason="Test disabled as TF suffers from install conflicts on Windows and macOS")
  @pytest.mark.parametrize(
-     "task, dynamic, int8, half, batch",
-     [  # generate all combinations but exclude those where both int8 and half are True
-         (task, dynamic, int8, half, batch)
-         for task, dynamic, int8, half, batch in product(TASKS, [False], [True, False], [True, False], [1])
-         if not (int8 and half)  # exclude cases where both int8 and half are True
+     "task, dynamic, int8, half, batch, nms",
+     [  # generate all combinations except for exclusion cases
+         (task, dynamic, int8, half, batch, nms)
+         for task, dynamic, int8, half, batch, nms in product(
+             TASKS, [False], [True, False], [True, False], [1], [True, False]
+         )
+         if not ((int8 and half) or (task == "classify" and nms))
      ],
  )
- def test_export_tflite_matrix(task, dynamic, int8, half, batch):
+ def test_export_tflite_matrix(task, dynamic, int8, half, batch, nms):
      """Test YOLO exports to TFLite format considering various export configurations."""
      file = YOLO(TASK2MODEL[task]).export(
-         format="tflite",
-         imgsz=32,
-         dynamic=dynamic,
-         int8=int8,
-         half=half,
-         batch=batch,
+         format="tflite", imgsz=32, dynamic=dynamic, int8=int8, half=half, batch=batch, nms=nms
      )
      YOLO(file)([SOURCE] * batch, imgsz=32)  # exported model inference at batch=3
      Path(file).unlink()  # cleanup

ultralytics/__init__.py
@@ -1,6 +1,6 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

- __version__ = "8.3.66"
+ __version__ = "8.3.68"

  import os


ultralytics/engine/exporter.py
@@ -75,7 +75,7 @@ from ultralytics.data.dataset import YOLODataset
  from ultralytics.data.utils import check_cls_dataset, check_det_dataset
  from ultralytics.nn.autobackend import check_class_names, default_class_names
  from ultralytics.nn.modules import C2f, Classify, Detect, RTDETRDecoder
- from ultralytics.nn.tasks import DetectionModel, SegmentationModel, WorldModel
+ from ultralytics.nn.tasks import ClassificationModel, DetectionModel, SegmentationModel, WorldModel
  from ultralytics.utils import (
      ARM64,
      DEFAULT_CFG,
@@ -103,7 +103,7 @@ from ultralytics.utils.checks import (
  )
  from ultralytics.utils.downloads import attempt_download_asset, get_github_assets, safe_download
  from ultralytics.utils.files import file_size, spaces_in_path
- from ultralytics.utils.ops import Profile
+ from ultralytics.utils.ops import Profile, nms_rotated, xywh2xyxy
  from ultralytics.utils.torch_utils import TORCH_1_13, get_latest_opset, select_device


@@ -111,16 +111,16 @@ def export_formats():
      """Ultralytics YOLO export formats."""
      x = [
          ["PyTorch", "-", ".pt", True, True, []],
-         ["TorchScript", "torchscript", ".torchscript", True, True, ["batch", "optimize"]],
-         ["ONNX", "onnx", ".onnx", True, True, ["batch", "dynamic", "half", "opset", "simplify"]],
-         ["OpenVINO", "openvino", "_openvino_model", True, False, ["batch", "dynamic", "half", "int8"]],
-         ["TensorRT", "engine", ".engine", False, True, ["batch", "dynamic", "half", "int8", "simplify"]],
+         ["TorchScript", "torchscript", ".torchscript", True, True, ["batch", "optimize", "nms"]],
+         ["ONNX", "onnx", ".onnx", True, True, ["batch", "dynamic", "half", "opset", "simplify", "nms"]],
+         ["OpenVINO", "openvino", "_openvino_model", True, False, ["batch", "dynamic", "half", "int8", "nms"]],
+         ["TensorRT", "engine", ".engine", False, True, ["batch", "dynamic", "half", "int8", "simplify", "nms"]],
          ["CoreML", "coreml", ".mlpackage", True, False, ["batch", "half", "int8", "nms"]],
-         ["TensorFlow SavedModel", "saved_model", "_saved_model", True, True, ["batch", "int8", "keras"]],
+         ["TensorFlow SavedModel", "saved_model", "_saved_model", True, True, ["batch", "int8", "keras", "nms"]],
          ["TensorFlow GraphDef", "pb", ".pb", True, True, ["batch"]],
-         ["TensorFlow Lite", "tflite", ".tflite", True, False, ["batch", "half", "int8"]],
+         ["TensorFlow Lite", "tflite", ".tflite", True, False, ["batch", "half", "int8", "nms"]],
          ["TensorFlow Edge TPU", "edgetpu", "_edgetpu.tflite", True, False, []],
-         ["TensorFlow.js", "tfjs", "_web_model", True, False, ["batch", "half", "int8"]],
+         ["TensorFlow.js", "tfjs", "_web_model", True, False, ["batch", "half", "int8", "nms"]],
          ["PaddlePaddle", "paddle", "_paddle_model", True, True, ["batch"]],
          ["MNN", "mnn", ".mnn", True, True, ["batch", "half", "int8"]],
          ["NCNN", "ncnn", "_ncnn_model", True, True, ["batch", "half"]],
@@ -281,6 +281,12 @@ class Exporter:
              )
          if self.args.int8 and tflite:
              assert not getattr(model, "end2end", False), "TFLite INT8 export not supported for end2end models."
+         if self.args.nms:
+             assert not isinstance(model, ClassificationModel), "'nms=True' is not valid for classification models."
+             if getattr(model, "end2end", False):
+                 LOGGER.warning("WARNING ⚠️ 'nms=True' is not available for end2end models. Forcing 'nms=False'.")
+                 self.args.nms = False
+             self.args.conf = self.args.conf or 0.25  # set conf default value for nms export
          if edgetpu:
              if not LINUX:
                  raise SystemError("Edge TPU export only supported on Linux. See https://coral.ai/docs/edgetpu/compiler")
@@ -344,8 +350,8 @@ class Exporter:
              )

          y = None
-         for _ in range(2):
-             y = model(im)  # dry runs
+         for _ in range(2):  # dry runs
+             y = NMSModel(model, self.args)(im) if self.args.nms and not coreml else model(im)
          if self.args.half and onnx and self.device.type != "cpu":
              im, model = im.half(), model.half()  # to FP16

@@ -476,7 +482,7 @@ class Exporter:
          LOGGER.info(f"\n{prefix} starting export with torch {torch.__version__}...")
          f = self.file.with_suffix(".torchscript")

-         ts = torch.jit.trace(self.model, self.im, strict=False)
+         ts = torch.jit.trace(NMSModel(self.model, self.args) if self.args.nms else self.model, self.im, strict=False)
          extra_files = {"config.txt": json.dumps(self.metadata)}  # torch._C.ExtraFilesMap()
          if self.args.optimize:  # https://pytorch.org/tutorials/recipes/mobile_interpreter.html
              LOGGER.info(f"{prefix} optimizing for mobile...")
@@ -499,19 +505,29 @@ class Exporter:
          opset_version = self.args.opset or get_latest_opset()
          LOGGER.info(f"\n{prefix} starting export with onnx {onnx.__version__} opset {opset_version}...")
          f = str(self.file.with_suffix(".onnx"))
-
          output_names = ["output0", "output1"] if isinstance(self.model, SegmentationModel) else ["output0"]
          dynamic = self.args.dynamic
          if dynamic:
+             self.model.cpu()  # dynamic=True only compatible with cpu
              dynamic = {"images": {0: "batch", 2: "height", 3: "width"}}  # shape(1,3,640,640)
              if isinstance(self.model, SegmentationModel):
                  dynamic["output0"] = {0: "batch", 2: "anchors"}  # shape(1, 116, 8400)
                  dynamic["output1"] = {0: "batch", 2: "mask_height", 3: "mask_width"}  # shape(1,32,160,160)
              elif isinstance(self.model, DetectionModel):
                  dynamic["output0"] = {0: "batch", 2: "anchors"}  # shape(1, 84, 8400)
+             if self.args.nms:  # only batch size is dynamic with NMS
+                 dynamic["output0"].pop(2)
+         if self.args.nms and self.model.task == "obb":
+             self.args.opset = opset_version  # for NMSModel
+             # OBB error https://github.com/pytorch/pytorch/issues/110859#issuecomment-1757841865
+             try:
+                 torch.onnx.register_custom_op_symbolic("aten::lift_fresh", lambda g, x: x, opset_version)
+             except RuntimeError:  # it will fail if it's already registered
+                 pass
+             check_requirements("onnxslim>=0.1.46")  # Older versions has bug with OBB

          torch.onnx.export(
-             self.model.cpu() if dynamic else self.model,  # dynamic=True only compatible with cpu
+             NMSModel(self.model, self.args) if self.args.nms else self.model,
              self.im.cpu() if dynamic else self.im,
              f,
              verbose=False,
@@ -553,7 +569,7 @@ class Exporter:
          LOGGER.info(f"\n{prefix} starting export with openvino {ov.__version__}...")
          assert TORCH_1_13, f"OpenVINO export requires torch>=1.13.0 but torch=={torch.__version__} is installed"
          ov_model = ov.convert_model(
-             self.model,
+             NMSModel(self.model, self.args) if self.args.nms else self.model,
              input=None if self.args.dynamic else [self.im.shape],
              example_input=self.im,
          )
@@ -736,9 +752,6 @@ class Exporter:
          f = self.file.with_suffix(".mlmodel" if mlmodel else ".mlpackage")
          if f.is_dir():
              shutil.rmtree(f)
-         if self.args.nms and getattr(self.model, "end2end", False):
-             LOGGER.warning(f"{prefix} WARNING ⚠️ 'nms=True' is not available for end2end models. Forcing 'nms=False'.")
-             self.args.nms = False

          bias = [0.0, 0.0, 0.0]
          scale = 1 / 255
@@ -1438,8 +1451,8 @@ class Exporter:
          nms.coordinatesOutputFeatureName = "coordinates"
          nms.iouThresholdInputFeatureName = "iouThreshold"
          nms.confidenceThresholdInputFeatureName = "confidenceThreshold"
-         nms.iouThreshold = 0.45
-         nms.confidenceThreshold = 0.25
+         nms.iouThreshold = self.args.iou
+         nms.confidenceThreshold = self.args.conf
          nms.pickTop.perClass = True
          nms.stringClassLabels.vector.extend(names.values())
          nms_model = ct.models.MLModel(nms_spec)
@@ -1507,3 +1520,103 @@ class IOSDetectModel(torch.nn.Module):
          """Normalize predictions of object detection model with input size-dependent factors."""
          xywh, cls = self.model(x)[0].transpose(0, 1).split((4, self.nc), 1)
          return cls, xywh * self.normalize  # confidence (3780, 80), coordinates (3780, 4)
+
+
+ class NMSModel(torch.nn.Module):
+     """Model wrapper with embedded NMS for Detect, Segment, Pose and OBB."""
+
+     def __init__(self, model, args):
+         """
+         Initialize the NMSModel.
+
+         Args:
+             model (torch.nn.module): The model to wrap with NMS postprocessing.
+             args (Namespace): The export arguments.
+         """
+         super().__init__()
+         self.model = model
+         self.args = args
+         self.obb = model.task == "obb"
+         self.is_tf = self.args.format in frozenset({"saved_model", "tflite", "tfjs"})
+
+     def forward(self, x):
+         """
+         Performs inference with NMS post-processing. Supports Detect, Segment, OBB and Pose.
+
+         Args:
+             x (torch.tensor): The preprocessed tensor with shape (N, 3, H, W).
+
+         Returns:
+             out (torch.tensor): The post-processed results with shape (N, max_det, 4 + 2 + extra_shape).
+         """
+         from functools import partial
+
+         from torchvision.ops import nms
+
+         preds = self.model(x)
+         pred = preds[0] if isinstance(preds, tuple) else preds
+         pred = pred.transpose(-1, -2)  # shape(1,84,6300) to shape(1,6300,84)
+         extra_shape = pred.shape[-1] - (4 + self.model.nc)  # extras from Segment, OBB, Pose
+         boxes, scores, extras = pred.split([4, self.model.nc, extra_shape], dim=2)
+         scores, classes = scores.max(dim=-1)
+         self.args.max_det = min(pred.shape[1], self.args.max_det)  # in case num_anchors < max_det
+         # (N, max_det, 4 coords + 1 class score + 1 class label + extra_shape).
+         out = torch.zeros(
+             boxes.shape[0],
+             self.args.max_det,
+             boxes.shape[-1] + 2 + extra_shape,
+             device=boxes.device,
+             dtype=boxes.dtype,
+         )
+         for i, (box, cls, score, extra) in enumerate(zip(boxes, classes, scores, extras)):
+             mask = score > self.args.conf
+             if self.is_tf:
+                 # TFLite GatherND error if mask is empty
+                 score *= mask
+                 # Explicit length otherwise reshape error, hardcoded to `self.args.max_det * 5`
+                 mask = score.topk(min(self.args.max_det * 5, score.shape[0])).indices
+             box, score, cls, extra = box[mask], score[mask], cls[mask], extra[mask]
+             if not self.obb:
+                 box = xywh2xyxy(box)
+             if self.is_tf:
+                 # TFlite bug returns less boxes
+                 box = torch.nn.functional.pad(box, (0, 0, 0, mask.shape[0] - box.shape[0]))
+             nmsbox = box.clone()
+             # `8` is the minimum value experimented to get correct NMS results for obb
+             multiplier = 8 if self.obb else 1
+             # Normalize boxes for NMS since large values for class offset causes issue with int8 quantization
+             if self.args.format == "tflite":  # TFLite is already normalized
+                 nmsbox *= multiplier
+             else:
+                 nmsbox = multiplier * nmsbox / torch.tensor(x.shape[2:], device=box.device, dtype=box.dtype).max()
+             if not self.args.agnostic_nms:  # class-specific NMS
+                 end = 2 if self.obb else 4
+                 # fully explicit expansion otherwise reshape error
+                 # large max_wh causes issues when quantizing
+                 cls_offset = cls.reshape(-1, 1).expand(nmsbox.shape[0], end)
+                 offbox = nmsbox[:, :end] + cls_offset * multiplier
+                 nmsbox = torch.cat((offbox, nmsbox[:, end:]), dim=-1)
+             nms_fn = (
+                 partial(
+                     nms_rotated,
+                     use_triu=not (
+                         self.is_tf
+                         or (self.args.opset or 14) < 14
+                         or (self.args.format == "openvino" and self.args.int8)  # OpenVINO int8 error with triu
+                     ),
+                 )
+                 if self.obb
+                 else nms
+             )
+             keep = nms_fn(
+                 torch.cat([nmsbox, extra], dim=-1) if self.obb else nmsbox,
+                 score,
+                 self.args.iou,
+             )[: self.args.max_det]
+             dets = torch.cat(
+                 [box[keep], score[keep].view(-1, 1), cls[keep].view(-1, 1).to(out.dtype), extra[keep]], dim=-1
+             )
+             # Zero-pad to max_det size to avoid reshape error
+             pad = (0, 0, 0, self.args.max_det - dets.shape[0])
+             out[i] = torch.nn.functional.pad(dets, pad)
+         return (out, preds[1]) if self.model.task == "segment" else out
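
Taken together, the exporter changes above add a generic `nms=True` path: `export_formats()` now advertises "nms" for TorchScript, ONNX, OpenVINO, TensorRT and the TensorFlow formats, and the new `NMSModel` wrapper embeds the NMS step in the traced or converted graph. A minimal sketch of how this is exercised, mirroring the updated calls in tests/test_exports.py (the weights file and input image are placeholders):

```python
from ultralytics import YOLO

# Export a detection model with NMS embedded in the exported graph (nms=True).
# conf defaults to 0.25 for NMS exports; iou and max_det come from the same export args.
onnx_file = YOLO("yolo11n.pt").export(format="onnx", imgsz=32, nms=True)

# The exported model returns post-processed detections of shape (N, max_det, 4 box coords + score + class + extras),
# so inference no longer needs a separate NMS step.
YOLO(onnx_file)(["bus.jpg"], imgsz=32)  # "bus.jpg" stands in for any test image
```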

ultralytics/engine/results.py
@@ -305,7 +305,7 @@ class Results(SimpleClass):
              if v is not None:
                  return len(v)

-     def update(self, boxes=None, masks=None, probs=None, obb=None):
+     def update(self, boxes=None, masks=None, probs=None, obb=None, keypoints=None):
          """
          Updates the Results object with new detection data.

@@ -318,6 +318,7 @@ class Results(SimpleClass):
              masks (torch.Tensor | None): A tensor of shape (N, H, W) containing segmentation masks.
              probs (torch.Tensor | None): A tensor of shape (num_classes,) containing class probabilities.
              obb (torch.Tensor | None): A tensor of shape (N, 5) containing oriented bounding box coordinates.
+             keypoints (torch.Tensor | None): A tensor of shape (N, 17, 3) containing keypoints.

          Examples:
              >>> results = model("image.jpg")
@@ -332,6 +333,8 @@ class Results(SimpleClass):
              self.probs = probs
          if obb is not None:
              self.obb = OBB(obb, self.orig_shape)
+         if keypoints is not None:
+             self.keypoints = Keypoints(keypoints, self.orig_shape)

      def _apply(self, fn, *args, **kwargs):
          """

ultralytics/models/nas/val.py
@@ -38,13 +38,7 @@ class NASValidator(DetectionValidator):
          """Apply Non-maximum suppression to prediction outputs."""
          boxes = ops.xyxy2xywh(preds_in[0][0])
          preds = torch.cat((boxes, preds_in[0][1]), -1).permute(0, 2, 1)
-         return ops.non_max_suppression(
+         return super().postprocess(
              preds,
-             self.args.conf,
-             self.args.iou,
-             labels=self.lb,
-             multi_label=False,
-             agnostic=self.args.single_cls or self.args.agnostic_nms,
-             max_det=self.args.max_det,
              max_time_img=0.5,
          )

ultralytics/models/yolo/detect/predict.py (new file)
@@ -0,0 +1,73 @@
+ # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+ from ultralytics.engine.predictor import BasePredictor
+ from ultralytics.engine.results import Results
+ from ultralytics.utils import ops
+
+
+ class DetectionPredictor(BasePredictor):
+     """
+     A class extending the BasePredictor class for prediction based on a detection model.
+
+     Example:
+         ```python
+         from ultralytics.utils import ASSETS
+         from ultralytics.models.yolo.detect import DetectionPredictor
+
+         args = dict(model="yolo11n.pt", source=ASSETS)
+         predictor = DetectionPredictor(overrides=args)
+         predictor.predict_cli()
+         ```
+     """
+
+     def postprocess(self, preds, img, orig_imgs, **kwargs):
+         """Post-processes predictions and returns a list of Results objects."""
+         preds = ops.non_max_suppression(
+             preds,
+             self.args.conf,
+             self.args.iou,
+             self.args.classes,
+             self.args.agnostic_nms,
+             max_det=self.args.max_det,
+             nc=len(self.model.names),
+             end2end=getattr(self.model, "end2end", False),
+             rotated=self.args.task == "obb",
+         )
+
+         if not isinstance(orig_imgs, list):  # input images are a torch.Tensor, not a list
+             orig_imgs = ops.convert_torch2numpy_batch(orig_imgs)
+
+         return self.construct_results(preds, img, orig_imgs, **kwargs)
+
+     def construct_results(self, preds, img, orig_imgs):
+         """
+         Constructs a list of result objects from the predictions.
+
+         Args:
+             preds (List[torch.Tensor]): List of predicted bounding boxes and scores.
+             img (torch.Tensor): The image after preprocessing.
+             orig_imgs (List[np.ndarray]): List of original images before preprocessing.
+
+         Returns:
+             (list): List of result objects containing the original images, image paths, class names, and bounding boxes.
+         """
+         return [
+             self.construct_result(pred, img, orig_img, img_path)
+             for pred, orig_img, img_path in zip(preds, orig_imgs, self.batch[0])
+         ]
+
+     def construct_result(self, pred, img, orig_img, img_path):
+         """
+         Constructs the result object from the prediction.
+
+         Args:
+             pred (torch.Tensor): The predicted bounding boxes and scores.
+             img (torch.Tensor): The image after preprocessing.
+             orig_img (np.ndarray): The original image before preprocessing.
+             img_path (str): The path to the original image.
+
+         Returns:
+             (Results): The result object containing the original image, image path, class names, and bounding boxes.
+         """
+         pred[:, :4] = ops.scale_boxes(img.shape[2:], pred[:, :4], orig_img.shape)
+         return Results(orig_img, path=img_path, names=self.model.names, boxes=pred[:, :6])
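
The new `DetectionPredictor` keeps NMS in `postprocess()` and isolates task-specific result assembly in `construct_result()`, which the OBB and Pose predictors below override. A hedged sketch of a custom subclass following the same pattern (the class name and rounding behaviour are illustrative, not part of the package):

```python
from ultralytics.engine.results import Results
from ultralytics.models.yolo.detect.predict import DetectionPredictor
from ultralytics.utils import ops


class RoundedBoxPredictor(DetectionPredictor):  # hypothetical subclass for illustration
    def construct_result(self, pred, img, orig_img, img_path):
        """Build a Results object with boxes rounded to integer pixel coordinates."""
        pred[:, :4] = ops.scale_boxes(img.shape[2:], pred[:, :4], orig_img.shape).round()
        return Results(orig_img, path=img_path, names=self.model.names, boxes=pred[:, :6])
```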

ultralytics/models/yolo/detect/val.py
@@ -78,6 +78,7 @@ class DetectionValidator(BaseValidator):
          self.args.save_json |= self.args.val and (self.is_coco or self.is_lvis) and not self.training  # run final val
          self.names = model.names
          self.nc = len(model.names)
+         self.end2end = getattr(model, "end2end", False)
          self.metrics.names = self.names
          self.metrics.plot = self.args.plots
          self.confusion_matrix = ConfusionMatrix(nc=self.nc, conf=self.args.conf)
@@ -96,9 +97,12 @@ class DetectionValidator(BaseValidator):
              self.args.conf,
              self.args.iou,
              labels=self.lb,
+             nc=self.nc,
              multi_label=True,
              agnostic=self.args.single_cls or self.args.agnostic_nms,
              max_det=self.args.max_det,
+             end2end=self.end2end,
+             rotated=self.args.task == "obb",
          )

      def _prepare_batch(self, si, batch):

ultralytics/models/yolo/obb/predict.py (new file)
@@ -0,0 +1,46 @@
+ # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+ import torch
+
+ from ultralytics.engine.results import Results
+ from ultralytics.models.yolo.detect.predict import DetectionPredictor
+ from ultralytics.utils import DEFAULT_CFG, ops
+
+
+ class OBBPredictor(DetectionPredictor):
+     """
+     A class extending the DetectionPredictor class for prediction based on an Oriented Bounding Box (OBB) model.
+
+     Example:
+         ```python
+         from ultralytics.utils import ASSETS
+         from ultralytics.models.yolo.obb import OBBPredictor
+
+         args = dict(model="yolo11n-obb.pt", source=ASSETS)
+         predictor = OBBPredictor(overrides=args)
+         predictor.predict_cli()
+         ```
+     """
+
+     def __init__(self, cfg=DEFAULT_CFG, overrides=None, _callbacks=None):
+         """Initializes OBBPredictor with optional model and data configuration overrides."""
+         super().__init__(cfg, overrides, _callbacks)
+         self.args.task = "obb"
+
+     def construct_result(self, pred, img, orig_img, img_path):
+         """
+         Constructs the result object from the prediction.
+
+         Args:
+             pred (torch.Tensor): The predicted bounding boxes, scores, and rotation angles.
+             img (torch.Tensor): The image after preprocessing.
+             orig_img (np.ndarray): The original image before preprocessing.
+             img_path (str): The path to the original image.
+
+         Returns:
+             (Results): The result object containing the original image, image path, class names, and oriented bounding boxes.
+         """
+         rboxes = ops.regularize_rboxes(torch.cat([pred[:, :4], pred[:, -1:]], dim=-1))
+         rboxes[:, :4] = ops.scale_boxes(img.shape[2:], rboxes[:, :4], orig_img.shape, xywh=True)
+         obb = torch.cat([rboxes, pred[:, 4:6]], dim=-1)
+         return Results(orig_img, path=img_path, names=self.model.names, obb=obb)

ultralytics/models/yolo/obb/val.py
@@ -36,20 +36,6 @@ class OBBValidator(DetectionValidator):
          val = self.data.get(self.args.split, "")  # validation path
          self.is_dota = isinstance(val, str) and "DOTA" in val  # is COCO

-     def postprocess(self, preds):
-         """Apply Non-maximum suppression to prediction outputs."""
-         return ops.non_max_suppression(
-             preds,
-             self.args.conf,
-             self.args.iou,
-             labels=self.lb,
-             nc=self.nc,
-             multi_label=True,
-             agnostic=self.args.single_cls or self.args.agnostic_nms,
-             max_det=self.args.max_det,
-             rotated=True,
-         )
-
      def _process_batch(self, detections, gt_bboxes, gt_cls):
          """
          Perform computation of the correct prediction matrix for a batch of detections and ground truth bounding boxes.

ultralytics/models/yolo/pose/predict.py
@@ -1,6 +1,5 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

- from ultralytics.engine.results import Results
  from ultralytics.models.yolo.detect.predict import DetectionPredictor
  from ultralytics.utils import DEFAULT_CFG, LOGGER, ops

@@ -30,27 +29,21 @@ class PosePredictor(DetectionPredictor):
                  "See https://github.com/ultralytics/ultralytics/issues/4031."
              )

-     def postprocess(self, preds, img, orig_imgs):
-         """Return detection results for a given input image or list of images."""
-         preds = ops.non_max_suppression(
-             preds,
-             self.args.conf,
-             self.args.iou,
-             agnostic=self.args.agnostic_nms,
-             max_det=self.args.max_det,
-             classes=self.args.classes,
-             nc=len(self.model.names),
-         )
-
-         if not isinstance(orig_imgs, list):  # input images are a torch.Tensor, not a list
-             orig_imgs = ops.convert_torch2numpy_batch(orig_imgs)
-
-         results = []
-         for pred, orig_img, img_path in zip(preds, orig_imgs, self.batch[0]):
-             pred[:, :4] = ops.scale_boxes(img.shape[2:], pred[:, :4], orig_img.shape).round()
-             pred_kpts = pred[:, 6:].view(len(pred), *self.model.kpt_shape) if len(pred) else pred[:, 6:]
-             pred_kpts = ops.scale_coords(img.shape[2:], pred_kpts, orig_img.shape)
-             results.append(
-                 Results(orig_img, path=img_path, names=self.model.names, boxes=pred[:, :6], keypoints=pred_kpts)
-             )
-         return results
+     def construct_result(self, pred, img, orig_img, img_path):
+         """
+         Constructs the result object from the prediction.
+
+         Args:
+             pred (torch.Tensor): The predicted bounding boxes, scores, and keypoints.
+             img (torch.Tensor): The image after preprocessing.
+             orig_img (np.ndarray): The original image before preprocessing.
+             img_path (str): The path to the original image.
+
+         Returns:
+             (Results): The result object containing the original image, image path, class names, bounding boxes, and keypoints.
+         """
+         result = super().construct_result(pred, img, orig_img, img_path)
+         pred_kpts = pred[:, 6:].view(len(pred), *self.model.kpt_shape) if len(pred) else pred[:, 6:]
+         pred_kpts = ops.scale_coords(img.shape[2:], pred_kpts, orig_img.shape)
+         result.update(keypoints=pred_kpts)
+         return result