ultralytics-8.3.76.tar.gz → ultralytics-8.3.78.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (251)
  1. {ultralytics-8.3.76/ultralytics.egg-info → ultralytics-8.3.78}/PKG-INFO +1 -1
  2. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/__init__.py +1 -1
  3. ultralytics-8.3.78/ultralytics/cfg/models/12/yolo12-cls.yaml +32 -0
  4. ultralytics-8.3.78/ultralytics/cfg/models/12/yolo12-obb.yaml +48 -0
  5. ultralytics-8.3.78/ultralytics/cfg/models/12/yolo12-pose.yaml +49 -0
  6. ultralytics-8.3.78/ultralytics/cfg/models/12/yolo12-seg.yaml +48 -0
  7. ultralytics-8.3.78/ultralytics/cfg/models/12/yolo12.yaml +48 -0
  8. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/engine/exporter.py +4 -5
  9. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/nn/autobackend.py +8 -7
  10. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/nn/modules/__init__.py +2 -0
  11. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/nn/modules/block.py +202 -0
  12. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/nn/tasks.py +14 -3
  13. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/downloads.py +1 -0
  14. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/torch_utils.py +10 -4
  15. {ultralytics-8.3.76 → ultralytics-8.3.78/ultralytics.egg-info}/PKG-INFO +1 -1
  16. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics.egg-info/SOURCES.txt +5 -0
  17. {ultralytics-8.3.76 → ultralytics-8.3.78}/LICENSE +0 -0
  18. {ultralytics-8.3.76 → ultralytics-8.3.78}/README.md +0 -0
  19. {ultralytics-8.3.76 → ultralytics-8.3.78}/pyproject.toml +0 -0
  20. {ultralytics-8.3.76 → ultralytics-8.3.78}/setup.cfg +0 -0
  21. {ultralytics-8.3.76 → ultralytics-8.3.78}/tests/__init__.py +0 -0
  22. {ultralytics-8.3.76 → ultralytics-8.3.78}/tests/conftest.py +0 -0
  23. {ultralytics-8.3.76 → ultralytics-8.3.78}/tests/test_cli.py +0 -0
  24. {ultralytics-8.3.76 → ultralytics-8.3.78}/tests/test_cuda.py +0 -0
  25. {ultralytics-8.3.76 → ultralytics-8.3.78}/tests/test_engine.py +0 -0
  26. {ultralytics-8.3.76 → ultralytics-8.3.78}/tests/test_exports.py +0 -0
  27. {ultralytics-8.3.76 → ultralytics-8.3.78}/tests/test_integrations.py +0 -0
  28. {ultralytics-8.3.76 → ultralytics-8.3.78}/tests/test_python.py +0 -0
  29. {ultralytics-8.3.76 → ultralytics-8.3.78}/tests/test_solutions.py +0 -0
  30. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/assets/bus.jpg +0 -0
  31. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/assets/zidane.jpg +0 -0
  32. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/__init__.py +0 -0
  33. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/Argoverse.yaml +0 -0
  34. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/DOTAv1.5.yaml +0 -0
  35. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/DOTAv1.yaml +0 -0
  36. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/GlobalWheat2020.yaml +0 -0
  37. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/ImageNet.yaml +0 -0
  38. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/Objects365.yaml +0 -0
  39. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/SKU-110K.yaml +0 -0
  40. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/VOC.yaml +0 -0
  41. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/VisDrone.yaml +0 -0
  42. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/african-wildlife.yaml +0 -0
  43. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/brain-tumor.yaml +0 -0
  44. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/carparts-seg.yaml +0 -0
  45. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/coco-pose.yaml +0 -0
  46. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/coco.yaml +0 -0
  47. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/coco128-seg.yaml +0 -0
  48. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/coco128.yaml +0 -0
  49. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/coco8-pose.yaml +0 -0
  50. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/coco8-seg.yaml +0 -0
  51. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/coco8.yaml +0 -0
  52. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/crack-seg.yaml +0 -0
  53. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/dog-pose.yaml +0 -0
  54. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/dota8.yaml +0 -0
  55. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/hand-keypoints.yaml +0 -0
  56. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/lvis.yaml +0 -0
  57. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/medical-pills.yaml +0 -0
  58. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/open-images-v7.yaml +0 -0
  59. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/package-seg.yaml +0 -0
  60. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/signature.yaml +0 -0
  61. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/tiger-pose.yaml +0 -0
  62. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/datasets/xView.yaml +0 -0
  63. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/default.yaml +0 -0
  64. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/11/yolo11-cls-resnet18.yaml +0 -0
  65. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/11/yolo11-cls.yaml +0 -0
  66. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/11/yolo11-obb.yaml +0 -0
  67. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/11/yolo11-pose.yaml +0 -0
  68. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/11/yolo11-seg.yaml +0 -0
  69. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/11/yolo11.yaml +0 -0
  70. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/rt-detr/rtdetr-l.yaml +0 -0
  71. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/rt-detr/rtdetr-resnet101.yaml +0 -0
  72. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/rt-detr/rtdetr-resnet50.yaml +0 -0
  73. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/rt-detr/rtdetr-x.yaml +0 -0
  74. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v10/yolov10b.yaml +0 -0
  75. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v10/yolov10l.yaml +0 -0
  76. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v10/yolov10m.yaml +0 -0
  77. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v10/yolov10n.yaml +0 -0
  78. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v10/yolov10s.yaml +0 -0
  79. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v10/yolov10x.yaml +0 -0
  80. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v3/yolov3-spp.yaml +0 -0
  81. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v3/yolov3-tiny.yaml +0 -0
  82. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v3/yolov3.yaml +0 -0
  83. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v5/yolov5-p6.yaml +0 -0
  84. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v5/yolov5.yaml +0 -0
  85. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v6/yolov6.yaml +0 -0
  86. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8-cls-resnet101.yaml +0 -0
  87. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8-cls-resnet50.yaml +0 -0
  88. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8-cls.yaml +0 -0
  89. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8-ghost-p2.yaml +0 -0
  90. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8-ghost-p6.yaml +0 -0
  91. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8-ghost.yaml +0 -0
  92. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8-obb.yaml +0 -0
  93. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8-p2.yaml +0 -0
  94. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8-p6.yaml +0 -0
  95. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8-pose-p6.yaml +0 -0
  96. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8-pose.yaml +0 -0
  97. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8-rtdetr.yaml +0 -0
  98. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8-seg-p6.yaml +0 -0
  99. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8-seg.yaml +0 -0
  100. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8-world.yaml +0 -0
  101. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8-worldv2.yaml +0 -0
  102. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v8/yolov8.yaml +0 -0
  103. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v9/yolov9c-seg.yaml +0 -0
  104. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v9/yolov9c.yaml +0 -0
  105. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v9/yolov9e-seg.yaml +0 -0
  106. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v9/yolov9e.yaml +0 -0
  107. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v9/yolov9m.yaml +0 -0
  108. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v9/yolov9s.yaml +0 -0
  109. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/models/v9/yolov9t.yaml +0 -0
  110. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/solutions/default.yaml +0 -0
  111. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/trackers/botsort.yaml +0 -0
  112. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/cfg/trackers/bytetrack.yaml +0 -0
  113. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/data/__init__.py +0 -0
  114. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/data/annotator.py +0 -0
  115. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/data/augment.py +0 -0
  116. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/data/base.py +0 -0
  117. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/data/build.py +0 -0
  118. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/data/converter.py +0 -0
  119. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/data/dataset.py +0 -0
  120. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/data/loaders.py +0 -0
  121. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/data/split_dota.py +0 -0
  122. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/data/utils.py +0 -0
  123. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/engine/__init__.py +0 -0
  124. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/engine/model.py +0 -0
  125. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/engine/predictor.py +0 -0
  126. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/engine/results.py +0 -0
  127. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/engine/trainer.py +0 -0
  128. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/engine/tuner.py +0 -0
  129. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/engine/validator.py +0 -0
  130. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/hub/__init__.py +0 -0
  131. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/hub/auth.py +0 -0
  132. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/hub/google/__init__.py +0 -0
  133. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/hub/session.py +0 -0
  134. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/hub/utils.py +0 -0
  135. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/__init__.py +0 -0
  136. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/fastsam/__init__.py +0 -0
  137. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/fastsam/model.py +0 -0
  138. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/fastsam/predict.py +0 -0
  139. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/fastsam/utils.py +0 -0
  140. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/fastsam/val.py +0 -0
  141. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/nas/__init__.py +0 -0
  142. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/nas/model.py +0 -0
  143. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/nas/predict.py +0 -0
  144. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/nas/val.py +0 -0
  145. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/rtdetr/__init__.py +0 -0
  146. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/rtdetr/model.py +0 -0
  147. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/rtdetr/predict.py +0 -0
  148. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/rtdetr/train.py +0 -0
  149. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/rtdetr/val.py +0 -0
  150. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/sam/__init__.py +0 -0
  151. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/sam/amg.py +0 -0
  152. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/sam/build.py +0 -0
  153. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/sam/model.py +0 -0
  154. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/sam/modules/__init__.py +0 -0
  155. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/sam/modules/blocks.py +0 -0
  156. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/sam/modules/decoders.py +0 -0
  157. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/sam/modules/encoders.py +0 -0
  158. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/sam/modules/memory_attention.py +0 -0
  159. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/sam/modules/sam.py +0 -0
  160. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/sam/modules/tiny_encoder.py +0 -0
  161. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/sam/modules/transformer.py +0 -0
  162. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/sam/modules/utils.py +0 -0
  163. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/sam/predict.py +0 -0
  164. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/utils/__init__.py +0 -0
  165. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/utils/loss.py +0 -0
  166. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/utils/ops.py +0 -0
  167. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/__init__.py +0 -0
  168. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/classify/__init__.py +0 -0
  169. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/classify/predict.py +0 -0
  170. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/classify/train.py +0 -0
  171. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/classify/val.py +0 -0
  172. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/detect/__init__.py +0 -0
  173. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/detect/predict.py +0 -0
  174. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/detect/train.py +0 -0
  175. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/detect/val.py +0 -0
  176. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/model.py +0 -0
  177. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/obb/__init__.py +0 -0
  178. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/obb/predict.py +0 -0
  179. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/obb/train.py +0 -0
  180. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/obb/val.py +0 -0
  181. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/pose/__init__.py +0 -0
  182. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/pose/predict.py +0 -0
  183. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/pose/train.py +0 -0
  184. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/pose/val.py +0 -0
  185. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/segment/__init__.py +0 -0
  186. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/segment/predict.py +0 -0
  187. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/segment/train.py +0 -0
  188. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/segment/val.py +0 -0
  189. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/world/__init__.py +0 -0
  190. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/world/train.py +0 -0
  191. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/models/yolo/world/train_world.py +0 -0
  192. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/nn/__init__.py +0 -0
  193. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/nn/modules/activation.py +0 -0
  194. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/nn/modules/conv.py +0 -0
  195. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/nn/modules/head.py +0 -0
  196. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/nn/modules/transformer.py +0 -0
  197. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/nn/modules/utils.py +0 -0
  198. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/solutions/__init__.py +0 -0
  199. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/solutions/ai_gym.py +0 -0
  200. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/solutions/analytics.py +0 -0
  201. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/solutions/distance_calculation.py +0 -0
  202. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/solutions/heatmap.py +0 -0
  203. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/solutions/object_counter.py +0 -0
  204. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/solutions/parking_management.py +0 -0
  205. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/solutions/queue_management.py +0 -0
  206. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/solutions/region_counter.py +0 -0
  207. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/solutions/security_alarm.py +0 -0
  208. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/solutions/solutions.py +0 -0
  209. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/solutions/speed_estimation.py +0 -0
  210. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/solutions/streamlit_inference.py +0 -0
  211. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/solutions/trackzone.py +0 -0
  212. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/trackers/__init__.py +0 -0
  213. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/trackers/basetrack.py +0 -0
  214. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/trackers/bot_sort.py +0 -0
  215. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/trackers/byte_tracker.py +0 -0
  216. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/trackers/track.py +0 -0
  217. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/trackers/utils/__init__.py +0 -0
  218. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/trackers/utils/gmc.py +0 -0
  219. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/trackers/utils/kalman_filter.py +0 -0
  220. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/trackers/utils/matching.py +0 -0
  221. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/__init__.py +0 -0
  222. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/autobatch.py +0 -0
  223. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/benchmarks.py +0 -0
  224. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/callbacks/__init__.py +0 -0
  225. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/callbacks/base.py +0 -0
  226. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/callbacks/clearml.py +0 -0
  227. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/callbacks/comet.py +0 -0
  228. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/callbacks/dvc.py +0 -0
  229. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/callbacks/hub.py +0 -0
  230. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/callbacks/mlflow.py +0 -0
  231. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/callbacks/neptune.py +0 -0
  232. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/callbacks/raytune.py +0 -0
  233. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/callbacks/tensorboard.py +0 -0
  234. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/callbacks/wb.py +0 -0
  235. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/checks.py +0 -0
  236. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/dist.py +0 -0
  237. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/errors.py +0 -0
  238. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/files.py +0 -0
  239. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/instance.py +0 -0
  240. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/loss.py +0 -0
  241. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/metrics.py +0 -0
  242. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/ops.py +0 -0
  243. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/patches.py +0 -0
  244. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/plotting.py +0 -0
  245. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/tal.py +0 -0
  246. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/triton.py +0 -0
  247. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/tuner.py +0 -0
  248. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics.egg-info/dependency_links.txt +0 -0
  249. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics.egg-info/entry_points.txt +0 -0
  250. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics.egg-info/requires.txt +0 -0
  251. {ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics.egg-info/top_level.txt +0 -0
{ultralytics-8.3.76/ultralytics.egg-info → ultralytics-8.3.78}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: ultralytics
- Version: 8.3.76
+ Version: 8.3.78
  Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
  Maintainer-email: Ultralytics <hello@ultralytics.com>
{ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/__init__.py
@@ -1,6 +1,6 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

- __version__ = "8.3.76"
+ __version__ = "8.3.78"

  import os

ultralytics-8.3.78/ultralytics/cfg/models/12/yolo12-cls.yaml
@@ -0,0 +1,32 @@
+ # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+ # YOLO12-cls image classification model
+ # Model docs: https://docs.ultralytics.com/models/yolo12
+ # Task docs: https://docs.ultralytics.com/tasks/classify
+
+ # Parameters
+ nc: 80 # number of classes
+ scales: # model compound scaling constants, i.e. 'model=yolo12n-cls.yaml' will call yolo12-cls.yaml with scale 'n'
+   # [depth, width, max_channels]
+   n: [0.50, 0.25, 1024] # summary: 152 layers, 1,820,976 parameters, 1,820,976 gradients, 3.7 GFLOPs
+   s: [0.50, 0.50, 1024] # summary: 152 layers, 6,206,992 parameters, 6,206,992 gradients, 13.6 GFLOPs
+   m: [0.50, 1.00, 512] # summary: 172 layers, 12,083,088 parameters, 12,083,088 gradients, 44.2 GFLOPs
+   l: [1.00, 1.00, 512] # summary: 312 layers, 15,558,640 parameters, 15,558,640 gradients, 56.9 GFLOPs
+   x: [1.00, 1.50, 512] # summary: 312 layers, 34,172,592 parameters, 34,172,592 gradients, 126.5 GFLOPs
+
+ # YOLO12n backbone
+ backbone:
+   # [from, repeats, module, args]
+   - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2
+   - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4
+   - [-1, 2, C3k2, [256, False, 0.25]]
+   - [-1, 1, Conv, [256, 3, 2]] # 3-P3/8
+   - [-1, 2, C3k2, [512, False, 0.25]]
+   - [-1, 1, Conv, [512, 3, 2]] # 5-P4/16
+   - [-1, 4, A2C2f, [512, True, 4]]
+   - [-1, 1, Conv, [1024, 3, 2]] # 7-P5/32
+   - [-1, 4, A2C2f, [1024, True, 1]] # 8
+
+ # YOLO12n head
+ head:
+   - [-1, 1, Classify, [nc]] # Classify
ultralytics-8.3.78/ultralytics/cfg/models/12/yolo12-obb.yaml
@@ -0,0 +1,48 @@
+ # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+ # YOLO12-obb Oriented Bounding Boxes (OBB) model with P3/8 - P5/32 outputs
+ # Model docs: https://docs.ultralytics.com/models/yolo12
+ # Task docs: https://docs.ultralytics.com/tasks/obb
+
+ # Parameters
+ nc: 80 # number of classes
+ scales: # model compound scaling constants, i.e. 'model=yolo12n-obb.yaml' will call yolo12-obb.yaml with scale 'n'
+   # [depth, width, max_channels]
+   n: [0.50, 0.25, 1024] # summary: 287 layers, 2,673,955 parameters, 2,673,939 gradients, 6.9 GFLOPs
+   s: [0.50, 0.50, 1024] # summary: 287 layers, 9,570,275 parameters, 9,570,259 gradients, 22.7 GFLOPs
+   m: [0.50, 1.00, 512] # summary: 307 layers, 21,048,003 parameters, 21,047,987 gradients, 71.8 GFLOPs
+   l: [1.00, 1.00, 512] # summary: 503 layers, 27,299,619 parameters, 27,299,603 gradients, 93.4 GFLOPs
+   x: [1.00, 1.50, 512] # summary: 503 layers, 61,119,939 parameters, 61,119,923 gradients, 208.6 GFLOPs
+
+ # YOLO12n backbone
+ backbone:
+   # [from, repeats, module, args]
+   - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2
+   - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4
+   - [-1, 2, C3k2, [256, False, 0.25]]
+   - [-1, 1, Conv, [256, 3, 2]] # 3-P3/8
+   - [-1, 2, C3k2, [512, False, 0.25]]
+   - [-1, 1, Conv, [512, 3, 2]] # 5-P4/16
+   - [-1, 4, A2C2f, [512, True, 4]]
+   - [-1, 1, Conv, [1024, 3, 2]] # 7-P5/32
+   - [-1, 4, A2C2f, [1024, True, 1]] # 8
+
+ # YOLO12n head
+ head:
+   - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
+   - [[-1, 6], 1, Concat, [1]] # cat backbone P4
+   - [-1, 2, A2C2f, [512, False, -1]] # 11
+
+   - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
+   - [[-1, 4], 1, Concat, [1]] # cat backbone P3
+   - [-1, 2, A2C2f, [256, False, -1]] # 14
+
+   - [-1, 1, Conv, [256, 3, 2]]
+   - [[-1, 11], 1, Concat, [1]] # cat head P4
+   - [-1, 2, A2C2f, [512, False, -1]] # 17
+
+   - [-1, 1, Conv, [512, 3, 2]]
+   - [[-1, 8], 1, Concat, [1]] # cat head P5
+   - [-1, 2, C3k2, [1024, True]] # 20 (P5/32-large)
+
+   - [[14, 17, 20], 1, OBB, [nc, 1]] # Detect(P3, P4, P5)
ultralytics-8.3.78/ultralytics/cfg/models/12/yolo12-pose.yaml
@@ -0,0 +1,49 @@
+ # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+ # YOLO12-pose keypoints/pose estimation model with P3/8 - P5/32 outputs
+ # Model docs: https://docs.ultralytics.com/models/yolo12
+ # Task docs: https://docs.ultralytics.com/tasks/pose
+
+ # Parameters
+ nc: 80 # number of classes
+ kpt_shape: [17, 3] # number of keypoints, number of dims (2 for x,y or 3 for x,y,visible)
+ scales: # model compound scaling constants, i.e. 'model=yolo12n-pose.yaml' will call yolo12-pose.yaml with scale 'n'
+   # [depth, width, max_channels]
+   n: [0.50, 0.25, 1024] # summary: 287 layers, 2,886,715 parameters, 2,886,699 gradients, 7.8 GFLOPs
+   s: [0.50, 0.50, 1024] # summary: 287 layers, 9,774,155 parameters, 9,774,139 gradients, 23.5 GFLOPs
+   m: [0.50, 1.00, 512] # summary: 307 layers, 21,057,753 parameters, 21,057,737 gradients, 71.8 GFLOPs
+   l: [1.00, 1.00, 512] # summary: 503 layers, 27,309,369 parameters, 27,309,353 gradients, 93.5 GFLOPs
+   x: [1.00, 1.50, 512] # summary: 503 layers, 61,134,489 parameters, 61,134,473 gradients, 208.7 GFLOPs
+
+ # YOLO12n backbone
+ backbone:
+   # [from, repeats, module, args]
+   - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2
+   - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4
+   - [-1, 2, C3k2, [256, False, 0.25]]
+   - [-1, 1, Conv, [256, 3, 2]] # 3-P3/8
+   - [-1, 2, C3k2, [512, False, 0.25]]
+   - [-1, 1, Conv, [512, 3, 2]] # 5-P4/16
+   - [-1, 4, A2C2f, [512, True, 4]]
+   - [-1, 1, Conv, [1024, 3, 2]] # 7-P5/32
+   - [-1, 4, A2C2f, [1024, True, 1]] # 8
+
+ # YOLO12n head
+ head:
+   - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
+   - [[-1, 6], 1, Concat, [1]] # cat backbone P4
+   - [-1, 2, A2C2f, [512, False, -1]] # 11
+
+   - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
+   - [[-1, 4], 1, Concat, [1]] # cat backbone P3
+   - [-1, 2, A2C2f, [256, False, -1]] # 14
+
+   - [-1, 1, Conv, [256, 3, 2]]
+   - [[-1, 11], 1, Concat, [1]] # cat head P4
+   - [-1, 2, A2C2f, [512, False, -1]] # 17
+
+   - [-1, 1, Conv, [512, 3, 2]]
+   - [[-1, 8], 1, Concat, [1]] # cat head P5
+   - [-1, 2, C3k2, [1024, True]] # 20 (P5/32-large)
+
+   - [[14, 17, 20], 1, Pose, [nc, kpt_shape]] # Detect(P3, P4, P5)
ultralytics-8.3.78/ultralytics/cfg/models/12/yolo12-seg.yaml
@@ -0,0 +1,48 @@
+ # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+ # YOLO12-seg instance segmentation model with P3/8 - P5/32 outputs
+ # Model docs: https://docs.ultralytics.com/models/yolo12
+ # Task docs: https://docs.ultralytics.com/tasks/segment
+
+ # Parameters
+ nc: 80 # number of classes
+ scales: # model compound scaling constants, i.e. 'model=yolo12n-seg.yaml' will call yolo12-seg.yaml with scale 'n'
+   # [depth, width, max_channels]
+   n: [0.50, 0.25, 1024] # summary: 294 layers, 2,855,056 parameters, 2,855,040 gradients, 10.6 GFLOPs
+   s: [0.50, 0.50, 1024] # summary: 294 layers, 9,938,592 parameters, 9,938,576 gradients, 35.7 GFLOPs
+   m: [0.50, 1.00, 512] # summary: 314 layers, 22,505,376 parameters, 22,505,360 gradients, 123.5 GFLOPs
+   l: [1.00, 1.00, 512] # summary: 510 layers, 28,756,992 parameters, 28,756,976 gradients, 145.1 GFLOPs
+   x: [1.00, 1.50, 512] # summary: 510 layers, 64,387,264 parameters, 64,387,248 gradients, 324.6 GFLOPs
+
+ # YOLO12n backbone
+ backbone:
+   # [from, repeats, module, args]
+   - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2
+   - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4
+   - [-1, 2, C3k2, [256, False, 0.25]]
+   - [-1, 1, Conv, [256, 3, 2]] # 3-P3/8
+   - [-1, 2, C3k2, [512, False, 0.25]]
+   - [-1, 1, Conv, [512, 3, 2]] # 5-P4/16
+   - [-1, 4, A2C2f, [512, True, 4]]
+   - [-1, 1, Conv, [1024, 3, 2]] # 7-P5/32
+   - [-1, 4, A2C2f, [1024, True, 1]] # 8
+
+ # YOLO12n head
+ head:
+   - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
+   - [[-1, 6], 1, Concat, [1]] # cat backbone P4
+   - [-1, 2, A2C2f, [512, False, -1]] # 11
+
+   - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
+   - [[-1, 4], 1, Concat, [1]] # cat backbone P3
+   - [-1, 2, A2C2f, [256, False, -1]] # 14
+
+   - [-1, 1, Conv, [256, 3, 2]]
+   - [[-1, 11], 1, Concat, [1]] # cat head P4
+   - [-1, 2, A2C2f, [512, False, -1]] # 17
+
+   - [-1, 1, Conv, [512, 3, 2]]
+   - [[-1, 8], 1, Concat, [1]] # cat head P5
+   - [-1, 2, C3k2, [1024, True]] # 20 (P5/32-large)
+
+   - [[14, 17, 20], 1, Segment, [nc, 32, 256]] # Detect(P3, P4, P5)
ultralytics-8.3.78/ultralytics/cfg/models/12/yolo12.yaml
@@ -0,0 +1,48 @@
+ # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+ # YOLO12 object detection model with P3/8 - P5/32 outputs
+ # Model docs: https://docs.ultralytics.com/models/yolo12
+ # Task docs: https://docs.ultralytics.com/tasks/detect
+
+ # Parameters
+ nc: 80 # number of classes
+ scales: # model compound scaling constants, i.e. 'model=yolo12n.yaml' will call yolo12.yaml with scale 'n'
+   # [depth, width, max_channels]
+   n: [0.50, 0.25, 1024] # summary: 272 layers, 2,602,288 parameters, 2,602,272 gradients, 6.7 GFLOPs
+   s: [0.50, 0.50, 1024] # summary: 272 layers, 9,284,096 parameters, 9,284,080 gradients, 21.7 GFLOPs
+   m: [0.50, 1.00, 512] # summary: 292 layers, 20,199,168 parameters, 20,199,152 gradients, 68.1 GFLOPs
+   l: [1.00, 1.00, 512] # summary: 488 layers, 26,450,784 parameters, 26,450,768 gradients, 89.7 GFLOPs
+   x: [1.00, 1.50, 512] # summary: 488 layers, 59,210,784 parameters, 59,210,768 gradients, 200.3 GFLOPs
+
+ # YOLO12n backbone
+ backbone:
+   # [from, repeats, module, args]
+   - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2
+   - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4
+   - [-1, 2, C3k2, [256, False, 0.25]]
+   - [-1, 1, Conv, [256, 3, 2]] # 3-P3/8
+   - [-1, 2, C3k2, [512, False, 0.25]]
+   - [-1, 1, Conv, [512, 3, 2]] # 5-P4/16
+   - [-1, 4, A2C2f, [512, True, 4]]
+   - [-1, 1, Conv, [1024, 3, 2]] # 7-P5/32
+   - [-1, 4, A2C2f, [1024, True, 1]] # 8
+
+ # YOLO12n head
+ head:
+   - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
+   - [[-1, 6], 1, Concat, [1]] # cat backbone P4
+   - [-1, 2, A2C2f, [512, False, -1]] # 11
+
+   - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
+   - [[-1, 4], 1, Concat, [1]] # cat backbone P3
+   - [-1, 2, A2C2f, [256, False, -1]] # 14
+
+   - [-1, 1, Conv, [256, 3, 2]]
+   - [[-1, 11], 1, Concat, [1]] # cat head P4
+   - [-1, 2, A2C2f, [512, False, -1]] # 17
+
+   - [-1, 1, Conv, [512, 3, 2]]
+   - [[-1, 8], 1, Concat, [1]] # cat head P5
+   - [-1, 2, C3k2, [1024, True]] # 20 (P5/32-large)
+
+   - [[14, 17, 20], 1, Detect, [nc]] # Detect(P3, P4, P5)
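The five YAML files above define the new YOLO12 model family (classification, OBB, pose, segmentation and detection heads on a shared A2C2f backbone). A minimal, hedged sketch of how such a config is typically consumed through the standard Ultralytics Python API, where the scale letter in the filename (e.g. 'yolo12n.yaml') selects the matching entry under 'scales' as the YAML comments describe:

# Hedged sketch: build a model from the new detection config and inspect it.
# Assumes the usual ultralytics package API; 'yolo12n.pt' weights are also
# registered in GITHUB_ASSETS_NAMES further down in this diff.
from ultralytics import YOLO

model = YOLO("yolo12n.yaml")  # scale 'n' -> depth 0.50, width 0.25, max_channels 1024
model.info()                  # layer/parameter summary comparable to the header comments
results = model.predict("https://ultralytics.com/images/bus.jpg")  # untrained weights, illustration only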
{ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/engine/exporter.py
@@ -309,9 +309,8 @@ class Exporter:
                  "WARNING ⚠️ INT8 export requires a missing 'data' arg for calibration. "
                  f"Using default 'data={self.args.data}'."
              )
-         if tfjs:
-             if ARM64 and LINUX:
-                 raise SystemError("TensorFlow.js export not supported on ARM64 Linux")
+         if tfjs and (ARM64 and LINUX):
+             raise SystemError("TensorFlow.js export not supported on ARM64 Linux")

          # Input
          im = torch.zeros(self.args.batch, 3, *self.imgsz).to(self.device)
@@ -419,7 +418,7 @@
          if pb or tfjs: # pb prerequisite to tfjs
              f[6], _ = self.export_pb(keras_model=keras_model)
          if tflite:
-             f[7], _ = self.export_tflite(keras_model=keras_model, nms=False, agnostic_nms=self.args.agnostic_nms)
+             f[7], _ = self.export_tflite()
          if edgetpu:
              f[8], _ = self.export_edgetpu(tflite_model=Path(f[5]) / f"{self.file.stem}_full_integer_quant.tflite")
          if tfjs:
@@ -1077,7 +1076,7 @@
          return f, None

      @try_export
-     def export_tflite(self, keras_model, nms, agnostic_nms, prefix=colorstr("TensorFlow Lite:")):
+     def export_tflite(self, prefix=colorstr("TensorFlow Lite:")):
          """YOLO TensorFlow Lite export."""
          # BUG https://github.com/ultralytics/ultralytics/issues/13436
          import tensorflow as tf # noqa
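The export changes above drop the unused parameters from export_tflite and collapse the nested TensorFlow.js ARM64 guard into one condition. For orientation, a hedged example of the public entry point that reaches this code path (format string per the standard Ultralytics export API):

# Hedged sketch: trigger the TFLite branch of Exporter via the public API.
from ultralytics import YOLO

model = YOLO("yolo11n.pt")
tflite_path = model.export(format="tflite")  # internally calls Exporter.export_tflite()
print(tflite_path)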
{ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/nn/autobackend.py
@@ -197,12 +197,13 @@ class AutoBackend(nn.Module):
              import onnxruntime

              providers = ["CPUExecutionProvider"]
-             if cuda and "CUDAExecutionProvider" in onnxruntime.get_available_providers():
-                 providers.insert(0, "CUDAExecutionProvider")
-             elif cuda: # Only log warning if CUDA was requested but unavailable
-                 LOGGER.warning("WARNING ⚠️ Failed to start ONNX Runtime with CUDA. Using CPU...")
-                 device = torch.device("cpu")
-                 cuda = False
+             if cuda:
+                 if "CUDAExecutionProvider" in onnxruntime.get_available_providers():
+                     providers.insert(0, "CUDAExecutionProvider")
+                 else: # Only log warning if CUDA was requested but unavailable
+                     LOGGER.warning("WARNING ⚠️ Failed to start ONNX Runtime with CUDA. Using CPU...")
+                     device = torch.device("cpu")
+                     cuda = False
              LOGGER.info(f"Using ONNX Runtime {providers[0]}")
              if onnx:
                  session = onnxruntime.InferenceSession(w, providers=providers)
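The restructured block keeps the same behaviour but only falls back (and warns) when CUDA was requested and ONNX Runtime has no CUDA provider. A small standalone illustration of the same provider check, using the public onnxruntime API:

# Hedged sketch mirroring the provider selection above, outside AutoBackend.
import onnxruntime

providers = ["CPUExecutionProvider"]
if "CUDAExecutionProvider" in onnxruntime.get_available_providers():
    providers.insert(0, "CUDAExecutionProvider")
print(providers)  # e.g. ['CUDAExecutionProvider', 'CPUExecutionProvider'] on a GPU build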
@@ -223,7 +224,7 @@ class AutoBackend(nn.Module):
              output_names = [x.name for x in session.get_outputs()]
              metadata = session.get_modelmeta().custom_metadata_map
              dynamic = isinstance(session.get_outputs()[0].shape[0], str)
-             fp16 = True if "float16" in session.get_inputs()[0].type else False
+             fp16 = "float16" in session.get_inputs()[0].type
              if not dynamic:
                  io = session.io_binding()
                  bindings = []
{ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/nn/modules/__init__.py
@@ -30,6 +30,7 @@ from .block import (
      SPP,
      SPPELAN,
      SPPF,
+     A2C2f,
      AConv,
      ADown,
      Attention,
@@ -160,4 +161,5 @@ __all__ = (
      "PSA",
      "TorchVision",
      "Index",
+     "A2C2f",
  )
{ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/nn/modules/block.py
@@ -1154,3 +1154,205 @@ class TorchVision(nn.Module):
          else:
              y = self.m(x)
          return y
+
+
+ class AAttn(nn.Module):
+     """
+     Area-attention module for YOLO models, providing efficient attention mechanisms.
+
+     This module implements an area-based attention mechanism that processes input features in a spatially-aware manner,
+     making it particularly effective for object detection tasks.
+
+     Attributes:
+         area (int): Number of areas the feature map is divided.
+         num_heads (int): Number of heads into which the attention mechanism is divided.
+         head_dim (int): Dimension of each attention head.
+         qkv (Conv): Convolution layer for computing query, key and value tensors.
+         proj (Conv): Projection convolution layer.
+         pe (Conv): Position encoding convolution layer.
+
+     Methods:
+         forward: Applies area-attention to input tensor.
+
+     Examples:
+         >>> attn = AAttn(dim=256, num_heads=8, area=4)
+         >>> x = torch.randn(1, 256, 32, 32)
+         >>> output = attn(x)
+         >>> print(output.shape)
+         torch.Size([1, 256, 32, 32])
+     """
+
+     def __init__(self, dim, num_heads, area=1):
+         """
+         Initializes an Area-attention module for YOLO models.
+
+         Args:
+             dim (int): Number of hidden channels.
+             num_heads (int): Number of heads into which the attention mechanism is divided.
+             area (int): Number of areas the feature map is divided, default is 1.
+         """
+         super().__init__()
+         self.area = area
+
+         self.num_heads = num_heads
+         self.head_dim = head_dim = dim // num_heads
+         all_head_dim = head_dim * self.num_heads
+
+         self.qkv = Conv(dim, all_head_dim * 3, 1, act=False)
+         self.proj = Conv(all_head_dim, dim, 1, act=False)
+         self.pe = Conv(all_head_dim, dim, 7, 1, 3, g=dim, act=False)
+
+     def forward(self, x):
+         """Processes the input tensor 'x' through the area-attention."""
+         B, C, H, W = x.shape
+         N = H * W
+
+         qkv = self.qkv(x).flatten(2).transpose(1, 2)
+         if self.area > 1:
+             qkv = qkv.reshape(B * self.area, N // self.area, C * 3)
+             B, N, _ = qkv.shape
+         q, k, v = (
+             qkv.view(B, N, self.num_heads, self.head_dim * 3)
+             .permute(0, 2, 3, 1)
+             .split([self.head_dim, self.head_dim, self.head_dim], dim=2)
+         )
+         attn = (q.transpose(-2, -1) @ k) * (self.head_dim**-0.5)
+         attn = attn.softmax(dim=-1)
+         x = v @ attn.transpose(-2, -1)
+         x = x.permute(0, 3, 1, 2)
+         v = v.permute(0, 3, 1, 2)
+
+         if self.area > 1:
+             x = x.reshape(B // self.area, N * self.area, C)
+             v = v.reshape(B // self.area, N * self.area, C)
+             B, N, _ = x.shape
+
+         x = x.reshape(B, H, W, C).permute(0, 3, 1, 2)
+         v = v.reshape(B, H, W, C).permute(0, 3, 1, 2)
+
+         x = x + self.pe(v)
+         return self.proj(x)
+
+
+ class ABlock(nn.Module):
+     """
+     Area-attention block module for efficient feature extraction in YOLO models.
+
+     This module implements an area-attention mechanism combined with a feed-forward network for processing feature maps.
+     It uses a novel area-based attention approach that is more efficient than traditional self-attention while
+     maintaining effectiveness.
+
+     Attributes:
+         attn (AAttn): Area-attention module for processing spatial features.
+         mlp (nn.Sequential): Multi-layer perceptron for feature transformation.
+
+     Methods:
+         _init_weights: Initializes module weights using truncated normal distribution.
+         forward: Applies area-attention and feed-forward processing to input tensor.
+
+     Examples:
+         >>> block = ABlock(dim=256, num_heads=8, mlp_ratio=1.2, area=1)
+         >>> x = torch.randn(1, 256, 32, 32)
+         >>> output = block(x)
+         >>> print(output.shape)
+         torch.Size([1, 256, 32, 32])
+     """
+
+     def __init__(self, dim, num_heads, mlp_ratio=1.2, area=1):
+         """
+         Initializes an Area-attention block module for efficient feature extraction in YOLO models.
+
+         This module implements an area-attention mechanism combined with a feed-forward network for processing feature
+         maps. It uses a novel area-based attention approach that is more efficient than traditional self-attention
+         while maintaining effectiveness.
+
+         Args:
+             dim (int): Number of input channels.
+             num_heads (int): Number of heads into which the attention mechanism is divided.
+             mlp_ratio (float): Expansion ratio for MLP hidden dimension.
+             area (int): Number of areas the feature map is divided.
+         """
+         super().__init__()
+
+         self.attn = AAttn(dim, num_heads=num_heads, area=area)
+         mlp_hidden_dim = int(dim * mlp_ratio)
+         self.mlp = nn.Sequential(Conv(dim, mlp_hidden_dim, 1), Conv(mlp_hidden_dim, dim, 1, act=False))
+
+         self.apply(self._init_weights)
+
+     def _init_weights(self, m):
+         """Initialize weights using a truncated normal distribution."""
+         if isinstance(m, nn.Conv2d):
+             nn.init.trunc_normal_(m.weight, std=0.02)
+             if m.bias is not None:
+                 nn.init.constant_(m.bias, 0)
+
+     def forward(self, x):
+         """Forward pass through ABlock, applying area-attention and feed-forward layers to the input tensor."""
+         x = x + self.attn(x)
+         return x + self.mlp(x)
+
+
+ class A2C2f(nn.Module):
+     """
+     Area-Attention C2f module for enhanced feature extraction with area-based attention mechanisms.
+
+     This module extends the C2f architecture by incorporating area-attention and ABlock layers for improved feature
+     processing. It supports both area-attention and standard convolution modes.
+
+     Attributes:
+         cv1 (Conv): Initial 1x1 convolution layer that reduces input channels to hidden channels.
+         cv2 (Conv): Final 1x1 convolution layer that processes concatenated features.
+         gamma (nn.Parameter | None): Learnable parameter for residual scaling when using area attention.
+         m (nn.ModuleList): List of either ABlock or C3k modules for feature processing.
+
+     Methods:
+         forward: Processes input through area-attention or standard convolution pathway.
+
+     Examples:
+         >>> m = A2C2f(512, 512, n=1, a2=True, area=1)
+         >>> x = torch.randn(1, 512, 32, 32)
+         >>> output = m(x)
+         >>> print(output.shape)
+         torch.Size([1, 512, 32, 32])
+     """
+
+     def __init__(self, c1, c2, n=1, a2=True, area=1, residual=False, mlp_ratio=2.0, e=0.5, g=1, shortcut=True):
+         """
+         Area-Attention C2f module for enhanced feature extraction with area-based attention mechanisms.
+
+         Args:
+             c1 (int): Number of input channels.
+             c2 (int): Number of output channels.
+             n (int): Number of ABlock or C3k modules to stack.
+             a2 (bool): Whether to use area attention blocks. If False, uses C3k blocks instead.
+             area (int): Number of areas the feature map is divided.
+             residual (bool): Whether to use residual connections with learnable gamma parameter.
+             mlp_ratio (float): Expansion ratio for MLP hidden dimension.
+             e (float): Channel expansion ratio for hidden channels.
+             g (int): Number of groups for grouped convolutions.
+             shortcut (bool): Whether to use shortcut connections in C3k blocks.
+         """
+         super().__init__()
+         c_ = int(c2 * e) # hidden channels
+         assert c_ % 32 == 0, "Dimension of ABlock be a multiple of 32."
+
+         self.cv1 = Conv(c1, c_, 1, 1)
+         self.cv2 = Conv((1 + n) * c_, c2, 1)
+
+         self.gamma = nn.Parameter(0.01 * torch.ones(c2), requires_grad=True) if a2 and residual else None
+         self.m = nn.ModuleList(
+             nn.Sequential(*(ABlock(c_, c_ // 32, mlp_ratio, area) for _ in range(2)))
+             if a2
+             else C3k(c_, c_, 2, shortcut, g)
+             for _ in range(n)
+         )
+
+     def forward(self, x):
+         """Forward pass through R-ELAN layer."""
+         y = [self.cv1(x)]
+         y.extend(m(y[-1]) for m in self.m)
+         y = self.cv2(torch.cat(y, 1))
+         if self.gamma is not None:
+             return x + self.gamma.view(-1, len(self.gamma), 1, 1) * y
+         return y
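The docstrings above already carry doctest-style examples; collected into one short script (shapes follow those Examples, and A2C2f's hidden width must stay a multiple of 32 per its assert):

# Hedged sketch exercising the new blocks exactly as their docstring Examples show.
import torch
from ultralytics.nn.modules.block import A2C2f, AAttn, ABlock

x = torch.randn(1, 256, 32, 32)
print(AAttn(dim=256, num_heads=8, area=4)(x).shape)                  # torch.Size([1, 256, 32, 32])
print(ABlock(dim=256, num_heads=8, mlp_ratio=1.2, area=1)(x).shape)  # torch.Size([1, 256, 32, 32])
y = torch.randn(1, 512, 32, 32)
print(A2C2f(512, 512, n=1, a2=True, area=1)(y).shape)                # torch.Size([1, 512, 32, 32])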
{ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/nn/tasks.py
@@ -7,7 +7,6 @@ import types
  from copy import deepcopy
  from pathlib import Path

- import thop
  import torch

  from ultralytics.nn.modules import (
@@ -23,6 +22,7 @@ from ultralytics.nn.modules import (
      SPP,
      SPPELAN,
      SPPF,
+     A2C2f,
      AConv,
      ADown,
      Bottleneck,
@@ -86,6 +86,11 @@ from ultralytics.utils.torch_utils import (
      time_sync,
  )

+ try:
+     import thop
+ except ImportError:
+     thop = None # conda support without 'ultralytics-thop' installed
+

  class BaseModel(torch.nn.Module):
      """The BaseModel class serves as a base class for all the models in the Ultralytics YOLO family."""
@@ -633,8 +638,8 @@ class WorldModel(DetectionModel):
              (torch.Tensor): Model's output tensor.
          """
          txt_feats = (self.txt_feats if txt_feats is None else txt_feats).to(device=x.device, dtype=x.dtype)
-         if len(txt_feats) != len(x):
-             txt_feats = txt_feats.repeat(len(x), 1, 1)
+         if len(txt_feats) != len(x) or self.model[-1].export:
+             txt_feats = txt_feats.expand(x.shape[0], -1, -1)
          ori_txt_feats = txt_feats.clone()
          y, dt, embeddings = [], [], [] # outputs
          for m in self.model: # except the head part
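The switch from repeat to expand broadcasts the text features across the batch as a view instead of materializing copies, and the added self.model[-1].export condition forces that broadcast during export even when the lengths already match. A tiny, hedged illustration of the tensor-level difference (shapes are hypothetical):

# Hedged illustration of repeat vs. expand on a (1, num_classes, dim) text-feature tensor.
import torch

txt_feats = torch.randn(1, 80, 512)       # hypothetical shape for illustration
repeated = txt_feats.repeat(4, 1, 1)      # allocates a new (4, 80, 512) tensor
expanded = txt_feats.expand(4, -1, -1)    # (4, 80, 512) view sharing the original storage
print(repeated.shape == expanded.shape, expanded.data_ptr() == txt_feats.data_ptr())  # True True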
@@ -981,6 +986,7 @@ def parse_model(d, ch, verbose=True): # model_dict, input_channels(3)
              PSA,
              SCDown,
              C2fCIB,
+             A2C2f,
          }
      )
      repeat_modules = frozenset( # modules with 'repeat' arguments
@@ -999,6 +1005,7 @@ def parse_model(d, ch, verbose=True): # model_dict, input_channels(3)
              C2fPSA,
              C2fCIB,
              C2PSA,
+             A2C2f,
          }
      )
      for i, (f, n, m, args) in enumerate(d["backbone"] + d["head"]): # from, number, module, args
@@ -1030,6 +1037,10 @@ def parse_model(d, ch, verbose=True): # model_dict, input_channels(3)
                  legacy = False
                  if scale in "mlx":
                      args[3] = True
+             if m is A2C2f:
+                 legacy = False
+                 if scale in "lx": # for L/X sizes
+                     args.extend((True, 1.2))
          elif m is AIFI:
              args = [ch[f], *args]
          elif m in frozenset({HGStem, HGBlock}):
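In the YOLO12 YAMLs an A2C2f entry carries args of the form [c2, a2, area] (e.g. [512, True, 4]); parse_model prepends the resolved input channels and inserts the repeat count, and for the 'l'/'x' scales the extension above appends what the A2C2f signature reads as residual=True and mlp_ratio=1.2. A hedged trace of that assembly, ignoring the width/depth scaling parse_model also applies:

# Hedged walk-through for '- [-1, 4, A2C2f, [512, True, 4]]' at scale 'x' (scaling factors omitted).
import torch
from ultralytics.nn.modules.block import A2C2f

c1, n = 512, 4                             # input channels and repeats resolved by parse_model
m = A2C2f(c1, 512, n, True, 4, True, 1.2)  # c1, c2, n, a2, area, residual=True, mlp_ratio=1.2
print(m(torch.randn(1, c1, 40, 40)).shape) # torch.Size([1, 512, 40, 40])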
{ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/downloads.py
@@ -18,6 +18,7 @@ GITHUB_ASSETS_REPO = "ultralytics/assets"
  GITHUB_ASSETS_NAMES = (
      [f"yolov8{k}{suffix}.pt" for k in "nsmlx" for suffix in ("", "-cls", "-seg", "-pose", "-obb", "-oiv7")]
      + [f"yolo11{k}{suffix}.pt" for k in "nsmlx" for suffix in ("", "-cls", "-seg", "-pose", "-obb")]
+     + [f"yolo12{k}{suffix}.pt" for k in "nsmlx" for suffix in ("",)] # detect models only currently
      + [f"yolov5{k}{resolution}u.pt" for k in "nsmlx" for resolution in ("", "6")]
      + [f"yolov3{k}u.pt" for k in ("", "-spp", "-tiny")]
      + [f"yolov8{k}-world.pt" for k in "smlx"]
{ultralytics-8.3.76 → ultralytics-8.3.78}/ultralytics/utils/torch_utils.py
@@ -12,7 +12,6 @@ from pathlib import Path
  from typing import Union

  import numpy as np
- import thop
  import torch
  import torch.distributed as dist
  import torch.nn as nn
@@ -31,6 +30,11 @@ from ultralytics.utils import (
  )
  from ultralytics.utils.checks import check_version

+ try:
+     import thop
+ except ImportError:
+     thop = None # conda support without 'ultralytics-thop' installed
+
  # Version checks (all default to version>=min_version)
  TORCH_1_9 = check_version(torch.__version__, "1.9.0")
  TORCH_1_13 = check_version(torch.__version__, "1.13.0")
@@ -313,8 +317,7 @@ def model_info(model, detailed=False, verbose=True, imgsz=640):
              if len(m._parameters):
                  for pn, p in m.named_parameters():
                      LOGGER.info(
-                         f"{i:>5g}{mn + '.' + pn:>40}{mt:>20}{p.requires_grad!r:>10}{p.numel():>12g}"
-                         f"{str(list(p.shape)):>20}{p.mean():>10.3g}{p.std():>10.3g}{str(p.dtype).replace('torch.', ''):>15}"
+                         f"{i:>5g}{f'{mn}.{pn}':>40}{mt:>20}{p.requires_grad!r:>10}{p.numel():>12g}{str(list(p.shape)):>20}{p.mean():>10.3g}{p.std():>10.3g}{str(p.dtype).replace('torch.', ''):>15}"
                      )
              else: # layers with no learnable params
                  LOGGER.info(f"{i:>5g}{mn:>40}{mt:>20}{False!r:>10}{0:>12g}{str([]):>20}{'-':>10}{'-':>10}{'-':>15}")
@@ -370,6 +373,9 @@ def model_info_for_loggers(trainer):


  def get_flops(model, imgsz=640):
      """Return a YOLO model's FLOPs."""
+     if not thop:
+         return 0.0 # if not installed return 0.0 GFLOPs
+
      try:
          model = de_parallel(model)
          p = next(model.parameters())
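With thop now optional, FLOPs reporting degrades gracefully instead of failing at import time. A hedged check of the new behaviour (model choice is illustrative):

# Hedged sketch: get_flops returns 0.0 when 'ultralytics-thop' is missing, else the usual estimate.
from ultralytics import YOLO
from ultralytics.utils.torch_utils import get_flops

model = YOLO("yolo11n.pt").model
print(get_flops(model, imgsz=640))  # roughly 6.5 GFLOPs with thop installed, 0.0 without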
@@ -681,7 +687,7 @@ def profile(input, ops, n=10, device=None, max_num_obj=0):
              m = m.half() if hasattr(m, "half") and isinstance(x, torch.Tensor) and x.dtype is torch.float16 else m
              tf, tb, t = 0, 0, [0, 0, 0] # dt forward, backward
              try:
-                 flops = thop.profile(deepcopy(m), inputs=[x], verbose=False)[0] / 1e9 * 2 # GFLOPs
+                 flops = thop.profile(deepcopy(m), inputs=[x], verbose=False)[0] / 1e9 * 2 if thop else 0 # GFLOPs
              except Exception:
                  flops = 0

{ultralytics-8.3.76 → ultralytics-8.3.78/ultralytics.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: ultralytics
- Version: 8.3.76
+ Version: 8.3.78
  Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
  Maintainer-email: Ultralytics <hello@ultralytics.com>