ultralytics-opencv-headless 8.3.248__tar.gz → 8.4.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/PKG-INFO +31 -39
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/README.md +30 -38
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/tests/__init__.py +2 -2
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/tests/conftest.py +1 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/tests/test_cuda.py +8 -2
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/tests/test_engine.py +6 -6
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/tests/test_exports.py +10 -3
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/tests/test_integrations.py +9 -9
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/tests/test_python.py +14 -14
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/tests/test_solutions.py +3 -3
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/__init__.py +1 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/__init__.py +12 -10
- ultralytics_opencv_headless-8.4.0/ultralytics/cfg/datasets/TT100K.yaml +346 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/default.yaml +3 -1
- ultralytics_opencv_headless-8.4.0/ultralytics/cfg/models/26/yolo26-cls.yaml +33 -0
- ultralytics_opencv_headless-8.4.0/ultralytics/cfg/models/26/yolo26-obb.yaml +52 -0
- ultralytics_opencv_headless-8.4.0/ultralytics/cfg/models/26/yolo26-p2.yaml +60 -0
- ultralytics_opencv_headless-8.4.0/ultralytics/cfg/models/26/yolo26-p6.yaml +60 -0
- ultralytics_opencv_headless-8.4.0/ultralytics/cfg/models/26/yolo26-pose.yaml +53 -0
- ultralytics_opencv_headless-8.4.0/ultralytics/cfg/models/26/yolo26-seg.yaml +52 -0
- ultralytics_opencv_headless-8.4.0/ultralytics/cfg/models/26/yolo26.yaml +52 -0
- ultralytics_opencv_headless-8.4.0/ultralytics/cfg/models/26/yoloe-26-seg.yaml +53 -0
- ultralytics_opencv_headless-8.4.0/ultralytics/cfg/models/26/yoloe-26.yaml +53 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/data/augment.py +7 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/data/dataset.py +1 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/engine/exporter.py +12 -3
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/engine/model.py +5 -4
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/engine/trainer.py +68 -40
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/engine/tuner.py +16 -7
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/fastsam/predict.py +1 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/detect/train.py +3 -2
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/detect/val.py +6 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/model.py +1 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/obb/predict.py +1 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/obb/train.py +1 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/pose/train.py +1 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/segment/predict.py +1 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/segment/train.py +1 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/segment/val.py +3 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/yoloe/train.py +6 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/yoloe/train_seg.py +6 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/nn/autobackend.py +15 -5
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/nn/modules/__init__.py +8 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/nn/modules/block.py +127 -8
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/nn/modules/head.py +818 -205
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/nn/tasks.py +74 -29
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/nn/text_model.py +5 -2
- ultralytics_opencv_headless-8.4.0/ultralytics/optim/__init__.py +5 -0
- ultralytics_opencv_headless-8.4.0/ultralytics/optim/muon.py +338 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/object_counter.py +1 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/benchmarks.py +2 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/callbacks/platform.py +170 -64
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/checks.py +22 -3
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/downloads.py +3 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/export/engine.py +19 -10
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/export/imx.py +23 -12
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/export/tensorflow.py +1 -41
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/loss.py +584 -203
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/metrics.py +2 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/ops.py +11 -2
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/plotting.py +3 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/tal.py +100 -20
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/torch_utils.py +1 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/tqdm.py +4 -1
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/tuner.py +0 -3
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics_opencv_headless.egg-info/PKG-INFO +31 -39
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics_opencv_headless.egg-info/SOURCES.txt +12 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/LICENSE +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/pyproject.toml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/setup.cfg +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/tests/test_cli.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/assets/bus.jpg +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/assets/zidane.jpg +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/Argoverse.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/DOTAv1.5.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/DOTAv1.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/GlobalWheat2020.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/HomeObjects-3K.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/ImageNet.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/Objects365.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/SKU-110K.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/VOC.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/VisDrone.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/african-wildlife.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/brain-tumor.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/carparts-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/coco-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/coco.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/coco128-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/coco128.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/coco8-grayscale.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/coco8-multispectral.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/coco8-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/coco8-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/coco8.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/construction-ppe.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/crack-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/dog-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/dota8-multispectral.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/dota8.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/hand-keypoints.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/kitti.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/lvis.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/medical-pills.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/open-images-v7.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/package-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/signature.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/tiger-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/datasets/xView.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/11/yolo11-cls-resnet18.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/11/yolo11-cls.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/11/yolo11-obb.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/11/yolo11-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/11/yolo11-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/11/yolo11.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/11/yoloe-11-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/11/yoloe-11.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/12/yolo12-cls.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/12/yolo12-obb.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/12/yolo12-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/12/yolo12-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/12/yolo12.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/rt-detr/rtdetr-l.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/rt-detr/rtdetr-resnet101.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/rt-detr/rtdetr-resnet50.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/rt-detr/rtdetr-x.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v10/yolov10b.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v10/yolov10l.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v10/yolov10m.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v10/yolov10n.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v10/yolov10s.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v10/yolov10x.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v3/yolov3-spp.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v3/yolov3-tiny.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v3/yolov3.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v5/yolov5-p6.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v5/yolov5.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v6/yolov6.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yoloe-v8-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yoloe-v8.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8-cls-resnet101.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8-cls-resnet50.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8-cls.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8-ghost-p2.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8-ghost-p6.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8-ghost.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8-obb.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8-p2.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8-p6.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8-pose-p6.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8-rtdetr.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8-seg-p6.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8-world.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8-worldv2.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v8/yolov8.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v9/yolov9c-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v9/yolov9c.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v9/yolov9e-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v9/yolov9e.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v9/yolov9m.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v9/yolov9s.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/models/v9/yolov9t.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/trackers/botsort.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/cfg/trackers/bytetrack.yaml +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/data/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/data/annotator.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/data/base.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/data/build.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/data/converter.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/data/loaders.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/data/scripts/download_weights.sh +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/data/scripts/get_coco.sh +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/data/scripts/get_coco128.sh +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/data/scripts/get_imagenet.sh +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/data/split.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/data/split_dota.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/data/utils.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/engine/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/engine/predictor.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/engine/results.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/engine/validator.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/hub/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/hub/auth.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/hub/google/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/hub/session.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/hub/utils.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/fastsam/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/fastsam/model.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/fastsam/utils.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/fastsam/val.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/nas/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/nas/model.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/nas/predict.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/nas/val.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/rtdetr/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/rtdetr/model.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/rtdetr/predict.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/rtdetr/train.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/rtdetr/val.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/amg.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/build.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/build_sam3.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/model.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/modules/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/modules/blocks.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/modules/decoders.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/modules/encoders.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/modules/memory_attention.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/modules/sam.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/modules/tiny_encoder.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/modules/transformer.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/modules/utils.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/predict.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/sam3/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/sam3/decoder.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/sam3/encoder.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/sam3/geometry_encoders.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/sam3/maskformer_segmentation.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/sam3/model_misc.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/sam3/necks.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/sam3/sam3_image.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/sam3/text_encoder_ve.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/sam3/vitdet.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/sam/sam3/vl_combiner.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/utils/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/utils/loss.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/utils/ops.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/classify/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/classify/predict.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/classify/train.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/classify/val.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/detect/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/detect/predict.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/obb/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/obb/val.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/pose/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/pose/predict.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/pose/val.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/segment/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/world/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/world/train.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/world/train_world.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/yoloe/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/yoloe/predict.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/models/yolo/yoloe/val.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/nn/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/nn/modules/activation.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/nn/modules/conv.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/nn/modules/transformer.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/nn/modules/utils.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/py.typed +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/ai_gym.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/analytics.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/config.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/distance_calculation.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/heatmap.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/instance_segmentation.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/object_blurrer.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/object_cropper.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/parking_management.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/queue_management.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/region_counter.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/security_alarm.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/similarity_search.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/solutions.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/speed_estimation.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/streamlit_inference.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/templates/similarity-search.html +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/trackzone.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/solutions/vision_eye.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/trackers/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/trackers/basetrack.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/trackers/bot_sort.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/trackers/byte_tracker.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/trackers/track.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/trackers/utils/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/trackers/utils/gmc.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/trackers/utils/kalman_filter.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/trackers/utils/matching.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/autobatch.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/autodevice.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/callbacks/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/callbacks/base.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/callbacks/clearml.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/callbacks/comet.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/callbacks/dvc.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/callbacks/hub.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/callbacks/mlflow.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/callbacks/neptune.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/callbacks/raytune.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/callbacks/tensorboard.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/callbacks/wb.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/cpu.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/dist.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/errors.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/events.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/export/__init__.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/files.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/git.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/instance.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/logger.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/nms.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/patches.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics/utils/triton.py +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics_opencv_headless.egg-info/dependency_links.txt +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics_opencv_headless.egg-info/entry_points.txt +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics_opencv_headless.egg-info/requires.txt +0 -0
- {ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/ultralytics_opencv_headless.egg-info/top_level.txt +0 -0
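Functionally, the user-facing changes in this release center on the new YOLO26 model family: the PKG-INFO and README diffs below switch the quickstart examples to `yolo26n.pt` and replace the benchmark tables with YOLO26 variants. As a quick orientation, here is a minimal sketch of the updated Python quickstart, assuming only the `yolo26n.pt` weight name that appears in the diff; `predict()` and `track()` are standard Ultralytics `YOLO` methods and are not introduced by this release.

```python
from ultralytics import YOLO

# Sketch of the updated quickstart from the README diff below. Assumes yolo26n.pt,
# the weight name used in the 8.4.0 examples; weights auto-download on first use.
model = YOLO("yolo26n.pt")

# Run prediction on the sample image referenced in the README
results = model.predict(source="https://ultralytics.com/images/bus.jpg")

# The README notes Tracking mode is compatible with all Detection, Segmentation,
# and Pose models; track() is the standard Ultralytics tracking entry point.
track_results = model.track(source="https://ultralytics.com/images/bus.jpg")
```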
{ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ultralytics-opencv-headless
-Version: 8.3.248
+Version: 8.4.0
 Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -98,7 +98,6 @@ Dynamic: license-file
 <div>
 <a href="https://github.com/ultralytics/ultralytics/actions/workflows/ci.yml"><img src="https://github.com/ultralytics/ultralytics/actions/workflows/ci.yml/badge.svg" alt="Ultralytics CI"></a>
 <a href="https://clickpy.clickhouse.com/dashboard/ultralytics"><img src="https://static.pepy.tech/badge/ultralytics" alt="Ultralytics Downloads"></a>
-<a href="https://zenodo.org/badge/latestdoi/264818686"><img src="https://zenodo.org/badge/264818686.svg" alt="Ultralytics YOLO Citation"></a>
 <a href="https://discord.com/invite/ultralytics"><img alt="Ultralytics Discord" src="https://img.shields.io/discord/1089800235347353640?logo=discord&logoColor=white&label=Discord&color=blue"></a>
 <a href="https://community.ultralytics.com/"><img alt="Ultralytics Forums" src="https://img.shields.io/discourse/users?server=https%3A%2F%2Fcommunity.ultralytics.com&logo=discourse&label=Forums&color=blue"></a>
 <a href="https://www.reddit.com/r/ultralytics/"><img alt="Ultralytics Reddit" src="https://img.shields.io/reddit/subreddit-subscribers/ultralytics?style=flat&logo=reddit&logoColor=white&label=Reddit&color=blue"></a>
@@ -166,8 +165,8 @@ For alternative installation methods, including [Conda](https://anaconda.org/con
 You can use Ultralytics YOLO directly from the Command Line Interface (CLI) with the `yolo` command:

 ```bash
-# Predict using a pretrained YOLO model (e.g.,
-yolo predict model=
+# Predict using a pretrained YOLO model (e.g., YOLO26n) on an image
+yolo predict model=yolo26n.pt source='https://ultralytics.com/images/bus.jpg'
 ```

 The `yolo` command supports various tasks and modes, accepting additional arguments like `imgsz=640`. Explore the YOLO [CLI Docs](https://docs.ultralytics.com/usage/cli/) for more examples.
@@ -179,8 +178,8 @@ Ultralytics YOLO can also be integrated directly into your Python projects. It a
 ```python
 from ultralytics import YOLO

-# Load a pretrained
-model = YOLO("
+# Load a pretrained YOLO26n model
+model = YOLO("yolo26n.pt")

 # Train the model on the COCO8 dataset for 100 epochs
 train_results = model.train(
@@ -207,7 +206,7 @@ Discover more examples in the YOLO [Python Docs](https://docs.ultralytics.com/us

 ## ✨ Models

-Ultralytics supports a wide range of YOLO models, from early versions like [YOLOv3](https://docs.ultralytics.com/models/yolov3/) to the latest [
+Ultralytics supports a wide range of YOLO models, from early versions like [YOLOv3](https://docs.ultralytics.com/models/yolov3/) to the latest [YOLO26](https://docs.ultralytics.com/models/yolo26/). The tables below showcase YOLO26 models pretrained on the [COCO](https://docs.ultralytics.com/datasets/detect/coco/) dataset for [Detection](https://docs.ultralytics.com/tasks/detect/), [Segmentation](https://docs.ultralytics.com/tasks/segment/), and [Pose Estimation](https://docs.ultralytics.com/tasks/pose/). Additionally, [Classification](https://docs.ultralytics.com/tasks/classify/) models pretrained on the [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/) dataset are available. [Tracking](https://docs.ultralytics.com/modes/track/) mode is compatible with all Detection, Segmentation, and Pose models. All [Models](https://docs.ultralytics.com/models/) are automatically downloaded from the latest Ultralytics [release](https://github.com/ultralytics/assets/releases) upon first use.

 <a href="https://docs.ultralytics.com/tasks/" target="_blank">
 <img width="100%" src="https://github.com/ultralytics/docs/releases/download/0/ultralytics-yolov8-tasks-banner.avif" alt="Ultralytics YOLO supported tasks">
@@ -221,11 +220,11 @@ Explore the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usa

 | Model | size<br><sup>(pixels) | mAP<sup>val<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | ------------------------------------------------------------------------------------ | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [
-| [
-| [
-| [
-| [
+| [YOLO26n](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n.pt) | 640 | 40.9 | 38.9 ± 0.7 | 1.7 ± 0.0 | 2.4 | 5.4 |
+| [YOLO26s](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s.pt) | 640 | 48.6 | 87.2 ± 0.9 | 2.5 ± 0.0 | 9.5 | 20.7 |
+| [YOLO26m](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m.pt) | 640 | 53.1 | 220.0 ± 1.4 | 4.7 ± 0.1 | 20.4 | 68.2 |
+| [YOLO26l](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l.pt) | 640 | 55.0 | 286.2 ± 2.0 | 6.2 ± 0.2 | 24.8 | 86.4 |
+| [YOLO26x](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x.pt) | 640 | 57.5 | 525.8 ± 4.0 | 11.8 ± 0.2 | 55.7 | 193.9 |

 - **mAP<sup>val</sup>** values refer to single-model single-scale performance on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val detect data=coco.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val detect data=coco.yaml batch=1 device=0|cpu`
@@ -238,11 +237,11 @@ Refer to the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) fo

 | Model | size<br><sup>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mask<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | -------------------------------------------------------------------------------------------- | --------------------- | -------------------- | --------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [
-| [
-| [
-| [
-| [
+| [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640 | 39.6 | 33.9 | 53.3 ± 0.5 | 2.1 ± 0.0 | 2.8 | 9.1 |
+| [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640 | 47.3 | 40.0 | 118.4 ± 0.9 | 3.3 ± 0.0 | 10.7 | 34.2 |
+| [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640 | 52.5 | 44.1 | 328.2 ± 2.4 | 6.7 ± 0.1 | 24.8 | 121.5 |
+| [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640 | 54.4 | 45.5 | 387.0 ± 3.7 | 8.0 ± 0.1 | 29.2 | 139.8 |
+| [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640 | 56.5 | 47.0 | 787.0 ± 6.8 | 16.4 ± 0.1 | 65.5 | 313.5 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val segment data=coco.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val segment data=coco.yaml batch=1 device=0|cpu`
@@ -255,11 +254,11 @@ Consult the [Classification Docs](https://docs.ultralytics.com/tasks/classify/)

 | Model | size<br><sup>(pixels) | acc<br><sup>top1 | acc<br><sup>top5 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) at 224 |
 | -------------------------------------------------------------------------------------------- | --------------------- | ---------------- | ---------------- | ------------------------------ | ----------------------------------- | ------------------ | ------------------------ |
-| [
-| [
-| [
-| [
-| [
+| [YOLO26n-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-cls.pt) | 224 | 71.4 | 90.1 | 5.0 ± 0.3 | 1.1 ± 0.0 | 2.8 | 0.5 |
+| [YOLO26s-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-cls.pt) | 224 | 76.0 | 92.9 | 7.9 ± 0.2 | 1.3 ± 0.0 | 6.7 | 1.6 |
+| [YOLO26m-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-cls.pt) | 224 | 78.1 | 94.2 | 17.2 ± 0.4 | 2.0 ± 0.0 | 11.6 | 4.9 |
+| [YOLO26l-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-cls.pt) | 224 | 79.0 | 94.6 | 23.2 ± 0.3 | 2.8 ± 0.0 | 14.1 | 6.2 |
+| [YOLO26x-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-cls.pt) | 224 | 79.9 | 95.0 | 41.4 ± 0.9 | 3.8 ± 0.0 | 29.6 | 13.6 |

 - **acc** values represent model accuracy on the [ImageNet](https://www.image-net.org/) dataset validation set. <br>Reproduce with `yolo val classify data=path/to/ImageNet device=0`
 - **Speed** metrics are averaged over ImageNet val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu`
@@ -272,11 +271,11 @@ See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usa

 | Model | size<br><sup>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | ---------------------------------------------------------------------------------------------- | --------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [
-| [
-| [
-| [
-| [
+| [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640 | 56.9 | 83.0 | 40.3 ± 0.5 | 1.8 ± 0.0 | 2.9 | 7.5 |
+| [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640 | 63.1 | 86.8 | 85.3 ± 0.9 | 2.7 ± 0.0 | 10.4 | 23.9 |
+| [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640 | 68.8 | 89.9 | 218.0 ± 1.5 | 5.0 ± 0.1 | 21.5 | 73.1 |
+| [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640 | 70.4 | 90.8 | 275.4 ± 2.4 | 6.5 ± 0.1 | 25.9 | 91.3 |
+| [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640 | 71.7 | 91.6 | 565.4 ± 3.0 | 12.2 ± 0.2 | 57.6 | 201.7 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO Keypoints val2017](https://docs.ultralytics.com/datasets/pose/coco/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val pose data=coco-pose.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu`
@@ -289,11 +288,11 @@ Check the [OBB Docs](https://docs.ultralytics.com/tasks/obb/) for usage examples

 | Model | size<br><sup>(pixels) | mAP<sup>test<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | -------------------------------------------------------------------------------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [
-| [
-| [
-| [
-| [
+| [YOLO26n-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-obb.pt) | 1024 | 78.9 | 97.7 ± 0.9 | 2.8 ± 0.0 | 2.5 | 14.0 |
+| [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024 | 79.8 | 218.0 ± 1.4 | 4.9 ± 0.1 | 9.8 | 55.1 |
+| [YOLO26m-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-obb.pt) | 1024 | 81.0 | 579.2 ± 3.8 | 10.2 ± 0.3 | 21.2 | 183.3 |
+| [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024 | 81.4 | 735.6 ± 3.1 | 13.0 ± 0.2 | 25.6 | 230.0 |
+| [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024 | 82.1 | 1485.7 ± 11.5 | 30.5 ± 0.9 | 57.6 | 516.5 |

 - **mAP<sup>test</sup>** values are for single-model multiscale performance on the [DOTAv1 test set](https://captain-whu.github.io/DOTA/dataset.html). <br>Reproduce by `yolo val obb data=DOTAv1.yaml device=0 split=test` and submit merged results to the [DOTA evaluation server](https://captain-whu.github.io/DOTA/evaluation.html).
 - **Speed** metrics are averaged over [DOTAv1 val images](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10) using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce by `yolo val obb data=DOTAv1.yaml batch=1 device=0|cpu`
@@ -328,13 +327,6 @@ Our key integrations with leading AI platforms extend the functionality of Ultra
 | :-----------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: |
 | Streamline YOLO workflows: Label, train, and deploy effortlessly with [Ultralytics HUB](https://hub.ultralytics.com/). Try now! | Track experiments, hyperparameters, and results with [Weights & Biases](https://docs.ultralytics.com/integrations/weights-biases/). | Free forever, [Comet ML](https://docs.ultralytics.com/integrations/comet/) lets you save YOLO models, resume training, and interactively visualize predictions. | Run YOLO inference up to 6x faster with [Neural Magic DeepSparse](https://docs.ultralytics.com/integrations/neural-magic/). |

-## 🌟 Ultralytics HUB
-
-Experience seamless AI with [Ultralytics HUB](https://hub.ultralytics.com/), the all-in-one platform for data visualization, training YOLO models, and deployment—no coding required. Transform images into actionable insights and bring your AI visions to life effortlessly using our cutting-edge platform and user-friendly [Ultralytics App](https://www.ultralytics.com/app-install). Start your journey for **Free** today!
-
-<a href="https://www.ultralytics.com/hub" target="_blank">
-<img width="100%" src="https://github.com/ultralytics/assets/raw/main/im/ultralytics-hub.png" alt="Ultralytics HUB preview image"></a>
-
 ## 🤝 Contribute

 We thrive on community collaboration! Ultralytics YOLO wouldn't be the SOTA framework it is without contributions from developers like you. Please see our [Contributing Guide](https://docs.ultralytics.com/help/contributing/) to get started. We also welcome your feedback—share your experience by completing our [Survey](https://www.ultralytics.com/survey?utm_source=github&utm_medium=social&utm_campaign=Survey). A huge **Thank You** 🙏 to everyone who contributes!
@@ -9,7 +9,6 @@
|
|
|
9
9
|
<div>
|
|
10
10
|
<a href="https://github.com/ultralytics/ultralytics/actions/workflows/ci.yml"><img src="https://github.com/ultralytics/ultralytics/actions/workflows/ci.yml/badge.svg" alt="Ultralytics CI"></a>
|
|
11
11
|
<a href="https://clickpy.clickhouse.com/dashboard/ultralytics"><img src="https://static.pepy.tech/badge/ultralytics" alt="Ultralytics Downloads"></a>
|
|
12
|
-
<a href="https://zenodo.org/badge/latestdoi/264818686"><img src="https://zenodo.org/badge/264818686.svg" alt="Ultralytics YOLO Citation"></a>
|
|
13
12
|
<a href="https://discord.com/invite/ultralytics"><img alt="Ultralytics Discord" src="https://img.shields.io/discord/1089800235347353640?logo=discord&logoColor=white&label=Discord&color=blue"></a>
|
|
14
13
|
<a href="https://community.ultralytics.com/"><img alt="Ultralytics Forums" src="https://img.shields.io/discourse/users?server=https%3A%2F%2Fcommunity.ultralytics.com&logo=discourse&label=Forums&color=blue"></a>
|
|
15
14
|
<a href="https://www.reddit.com/r/ultralytics/"><img alt="Ultralytics Reddit" src="https://img.shields.io/reddit/subreddit-subscribers/ultralytics?style=flat&logo=reddit&logoColor=white&label=Reddit&color=blue"></a>
|
|
@@ -77,8 +76,8 @@ For alternative installation methods, including [Conda](https://anaconda.org/con
 You can use Ultralytics YOLO directly from the Command Line Interface (CLI) with the `yolo` command:

 ```bash
-# Predict using a pretrained YOLO model (e.g.,
-yolo predict model=
+# Predict using a pretrained YOLO model (e.g., YOLO26n) on an image
+yolo predict model=yolo26n.pt source='https://ultralytics.com/images/bus.jpg'
 ```

 The `yolo` command supports various tasks and modes, accepting additional arguments like `imgsz=640`. Explore the YOLO [CLI Docs](https://docs.ultralytics.com/usage/cli/) for more examples.

@@ -90,8 +89,8 @@ Ultralytics YOLO can also be integrated directly into your Python projects. It a
 ```python
 from ultralytics import YOLO

-# Load a pretrained
-model = YOLO("
+# Load a pretrained YOLO26n model
+model = YOLO("yolo26n.pt")

 # Train the model on the COCO8 dataset for 100 epochs
 train_results = model.train(
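As an aside for readers skimming this hunk: below is a minimal, self-contained sketch of the Python workflow the updated README documents, assuming the `yolo26n.pt` weights and the `coco8.yaml` dataset resolve as in the excerpt above; the training arguments are illustrative rather than taken from the diff.

```python
from ultralytics import YOLO

# Load the pretrained YOLO26n checkpoint referenced in the updated README
model = YOLO("yolo26n.pt")

# Train on the small COCO8 sample dataset (illustrative settings)
results = model.train(data="coco8.yaml", epochs=100, imgsz=640)

# Run inference on the same image used in the CLI example above
predictions = model("https://ultralytics.com/images/bus.jpg")

# Export the trained model to ONNX for deployment
onnx_path = model.export(format="onnx")
```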
@@ -118,7 +117,7 @@ Discover more examples in the YOLO [Python Docs](https://docs.ultralytics.com/us

 ## ✨ Models

-Ultralytics supports a wide range of YOLO models, from early versions like [YOLOv3](https://docs.ultralytics.com/models/yolov3/) to the latest [
+Ultralytics supports a wide range of YOLO models, from early versions like [YOLOv3](https://docs.ultralytics.com/models/yolov3/) to the latest [YOLO26](https://docs.ultralytics.com/models/yolo26/). The tables below showcase YOLO26 models pretrained on the [COCO](https://docs.ultralytics.com/datasets/detect/coco/) dataset for [Detection](https://docs.ultralytics.com/tasks/detect/), [Segmentation](https://docs.ultralytics.com/tasks/segment/), and [Pose Estimation](https://docs.ultralytics.com/tasks/pose/). Additionally, [Classification](https://docs.ultralytics.com/tasks/classify/) models pretrained on the [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/) dataset are available. [Tracking](https://docs.ultralytics.com/modes/track/) mode is compatible with all Detection, Segmentation, and Pose models. All [Models](https://docs.ultralytics.com/models/) are automatically downloaded from the latest Ultralytics [release](https://github.com/ultralytics/assets/releases) upon first use.

 <a href="https://docs.ultralytics.com/tasks/" target="_blank">
 <img width="100%" src="https://github.com/ultralytics/docs/releases/download/0/ultralytics-yolov8-tasks-banner.avif" alt="Ultralytics YOLO supported tasks">

@@ -132,11 +131,11 @@ Explore the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usa

 | Model | size<br><sup>(pixels) | mAP<sup>val<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | ------------------------------------------------------------------------------------ | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [
-| [
-| [
-| [
-| [
+| [YOLO26n](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n.pt) | 640 | 40.9 | 38.9 ± 0.7 | 1.7 ± 0.0 | 2.4 | 5.4 |
+| [YOLO26s](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s.pt) | 640 | 48.6 | 87.2 ± 0.9 | 2.5 ± 0.0 | 9.5 | 20.7 |
+| [YOLO26m](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m.pt) | 640 | 53.1 | 220.0 ± 1.4 | 4.7 ± 0.1 | 20.4 | 68.2 |
+| [YOLO26l](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l.pt) | 640 | 55.0 | 286.2 ± 2.0 | 6.2 ± 0.2 | 24.8 | 86.4 |
+| [YOLO26x](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x.pt) | 640 | 57.5 | 525.8 ± 4.0 | 11.8 ± 0.2 | 55.7 | 193.9 |

 - **mAP<sup>val</sup>** values refer to single-model single-scale performance on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val detect data=coco.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val detect data=coco.yaml batch=1 device=0|cpu`
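For orientation, the `yolo val ...` reproduce commands quoted in the notes above map onto the Python API as well; a minimal sketch, assuming the `yolo26n.pt` weights and `coco.yaml` dataset names used in the table and a single CUDA device (`device=0` mirrors the CLI flag):

```python
from ultralytics import YOLO

# Validate a pretrained detection checkpoint on COCO, mirroring
# `yolo val detect data=coco.yaml device=0` from the table notes
model = YOLO("yolo26n.pt")
metrics = model.val(data="coco.yaml", device=0)

# mAP50-95, the headline metric reported in the benchmark table
print(metrics.box.map)
```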
@@ -149,11 +148,11 @@ Refer to the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) fo

 | Model | size<br><sup>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mask<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | -------------------------------------------------------------------------------------------- | --------------------- | -------------------- | --------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [
-| [
-| [
-| [
-| [
+| [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640 | 39.6 | 33.9 | 53.3 ± 0.5 | 2.1 ± 0.0 | 2.8 | 9.1 |
+| [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640 | 47.3 | 40.0 | 118.4 ± 0.9 | 3.3 ± 0.0 | 10.7 | 34.2 |
+| [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640 | 52.5 | 44.1 | 328.2 ± 2.4 | 6.7 ± 0.1 | 24.8 | 121.5 |
+| [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640 | 54.4 | 45.5 | 387.0 ± 3.7 | 8.0 ± 0.1 | 29.2 | 139.8 |
+| [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640 | 56.5 | 47.0 | 787.0 ± 6.8 | 16.4 ± 0.1 | 65.5 | 313.5 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val segment data=coco.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val segment data=coco.yaml batch=1 device=0|cpu`

@@ -166,11 +165,11 @@ Consult the [Classification Docs](https://docs.ultralytics.com/tasks/classify/)

 | Model | size<br><sup>(pixels) | acc<br><sup>top1 | acc<br><sup>top5 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) at 224 |
 | -------------------------------------------------------------------------------------------- | --------------------- | ---------------- | ---------------- | ------------------------------ | ----------------------------------- | ------------------ | ------------------------ |
-| [
-| [
-| [
-| [
-| [
+| [YOLO26n-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-cls.pt) | 224 | 71.4 | 90.1 | 5.0 ± 0.3 | 1.1 ± 0.0 | 2.8 | 0.5 |
+| [YOLO26s-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-cls.pt) | 224 | 76.0 | 92.9 | 7.9 ± 0.2 | 1.3 ± 0.0 | 6.7 | 1.6 |
+| [YOLO26m-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-cls.pt) | 224 | 78.1 | 94.2 | 17.2 ± 0.4 | 2.0 ± 0.0 | 11.6 | 4.9 |
+| [YOLO26l-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-cls.pt) | 224 | 79.0 | 94.6 | 23.2 ± 0.3 | 2.8 ± 0.0 | 14.1 | 6.2 |
+| [YOLO26x-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-cls.pt) | 224 | 79.9 | 95.0 | 41.4 ± 0.9 | 3.8 ± 0.0 | 29.6 | 13.6 |

 - **acc** values represent model accuracy on the [ImageNet](https://www.image-net.org/) dataset validation set. <br>Reproduce with `yolo val classify data=path/to/ImageNet device=0`
 - **Speed** metrics are averaged over ImageNet val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu`

@@ -183,11 +182,11 @@ See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usa

 | Model | size<br><sup>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | ---------------------------------------------------------------------------------------------- | --------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [
-| [
-| [
-| [
-| [
+| [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640 | 56.9 | 83.0 | 40.3 ± 0.5 | 1.8 ± 0.0 | 2.9 | 7.5 |
+| [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640 | 63.1 | 86.8 | 85.3 ± 0.9 | 2.7 ± 0.0 | 10.4 | 23.9 |
+| [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640 | 68.8 | 89.9 | 218.0 ± 1.5 | 5.0 ± 0.1 | 21.5 | 73.1 |
+| [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640 | 70.4 | 90.8 | 275.4 ± 2.4 | 6.5 ± 0.1 | 25.9 | 91.3 |
+| [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640 | 71.7 | 91.6 | 565.4 ± 3.0 | 12.2 ± 0.2 | 57.6 | 201.7 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO Keypoints val2017](https://docs.ultralytics.com/datasets/pose/coco/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val pose data=coco-pose.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu`

@@ -200,11 +199,11 @@ Check the [OBB Docs](https://docs.ultralytics.com/tasks/obb/) for usage examples

 | Model | size<br><sup>(pixels) | mAP<sup>test<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | -------------------------------------------------------------------------------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [
-| [
-| [
-| [
-| [
+| [YOLO26n-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-obb.pt) | 1024 | 78.9 | 97.7 ± 0.9 | 2.8 ± 0.0 | 2.5 | 14.0 |
+| [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024 | 79.8 | 218.0 ± 1.4 | 4.9 ± 0.1 | 9.8 | 55.1 |
+| [YOLO26m-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-obb.pt) | 1024 | 81.0 | 579.2 ± 3.8 | 10.2 ± 0.3 | 21.2 | 183.3 |
+| [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024 | 81.4 | 735.6 ± 3.1 | 13.0 ± 0.2 | 25.6 | 230.0 |
+| [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024 | 82.1 | 1485.7 ± 11.5 | 30.5 ± 0.9 | 57.6 | 516.5 |

 - **mAP<sup>test</sup>** values are for single-model multiscale performance on the [DOTAv1 test set](https://captain-whu.github.io/DOTA/dataset.html). <br>Reproduce by `yolo val obb data=DOTAv1.yaml device=0 split=test` and submit merged results to the [DOTA evaluation server](https://captain-whu.github.io/DOTA/evaluation.html).
 - **Speed** metrics are averaged over [DOTAv1 val images](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10) using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce by `yolo val obb data=DOTAv1.yaml batch=1 device=0|cpu`

@@ -239,13 +238,6 @@ Our key integrations with leading AI platforms extend the functionality of Ultra
 | :-----------------------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------: |
 | Streamline YOLO workflows: Label, train, and deploy effortlessly with [Ultralytics HUB](https://hub.ultralytics.com/). Try now! | Track experiments, hyperparameters, and results with [Weights & Biases](https://docs.ultralytics.com/integrations/weights-biases/). | Free forever, [Comet ML](https://docs.ultralytics.com/integrations/comet/) lets you save YOLO models, resume training, and interactively visualize predictions. | Run YOLO inference up to 6x faster with [Neural Magic DeepSparse](https://docs.ultralytics.com/integrations/neural-magic/). |

-## 🌟 Ultralytics HUB
-
-Experience seamless AI with [Ultralytics HUB](https://hub.ultralytics.com/), the all-in-one platform for data visualization, training YOLO models, and deployment—no coding required. Transform images into actionable insights and bring your AI visions to life effortlessly using our cutting-edge platform and user-friendly [Ultralytics App](https://www.ultralytics.com/app-install). Start your journey for **Free** today!
-
-<a href="https://www.ultralytics.com/hub" target="_blank">
-<img width="100%" src="https://github.com/ultralytics/assets/raw/main/im/ultralytics-hub.png" alt="Ultralytics HUB preview image"></a>
-
 ## 🤝 Contribute

 We thrive on community collaboration! Ultralytics YOLO wouldn't be the SOTA framework it is without contributions from developers like you. Please see our [Contributing Guide](https://docs.ultralytics.com/help/contributing/) to get started. We also welcome your feedback—share your experience by completing our [Survey](https://www.ultralytics.com/survey?utm_source=github&utm_medium=social&utm_campaign=Survey). A huge **Thank You** 🙏 to everyone who contributes!
{ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/tests/__init__.py
RENAMED

@@ -4,8 +4,8 @@ from ultralytics.cfg import TASK2DATA, TASK2MODEL, TASKS
 from ultralytics.utils import ASSETS, WEIGHTS_DIR, checks

 # Constants used in tests
-MODEL = WEIGHTS_DIR / "path with spaces" / "
-CFG = "
+MODEL = WEIGHTS_DIR / "path with spaces" / "yolo26n.pt"  # test spaces in path
+CFG = "yolo26n.yaml"
 SOURCE = ASSETS / "bus.jpg"
 SOURCES_LIST = [ASSETS / "bus.jpg", ASSETS, ASSETS / "*", ASSETS / "**/*.jpg"]
 CUDA_IS_AVAILABLE = checks.cuda_is_available()

{ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/tests/conftest.py
RENAMED

@@ -50,7 +50,7 @@ def pytest_terminal_summary(terminalreporter, exitstatus, config):

     # Remove files
     models = [path for x in {"*.onnx", "*.torchscript"} for path in WEIGHTS_DIR.rglob(x)]
-    for file in ["decelera_portrait_min.mov", "bus.jpg", "
+    for file in ["decelera_portrait_min.mov", "bus.jpg", "yolo26n.onnx", "yolo26n.torchscript", *models]:
         Path(file).unlink(missing_ok=True)

     # Remove directories
{ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/tests/test_cuda.py
RENAMED

@@ -41,7 +41,7 @@ def test_checks():
 @pytest.mark.skipif(not DEVICES, reason="No CUDA devices available")
 def test_amp():
     """Test AMP training checks."""
-    model = YOLO("
+    model = YOLO("yolo26n.pt").model.to(f"cuda:{DEVICES[0]}")
     assert check_amp(model)


@@ -91,6 +91,12 @@ def test_export_onnx_matrix(task, dynamic, int8, half, batch, simplify, nms):
 )
 def test_export_engine_matrix(task, dynamic, int8, half, batch):
     """Test YOLO model export to TensorRT format for various configurations and run inference."""
+    import tensorrt as trt
+
+    is_trt10 = int(trt.__version__.split(".", 1)[0]) >= 10
+    if is_trt10 and int8 and dynamic:
+        pytest.skip("YOLO26 INT8+dynamic export requires explicit quantization on TensorRT 10+")
+
     file = YOLO(TASK2MODEL[task]).export(
         format="engine",
         imgsz=32,
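For readers unfamiliar with what this matrix test exercises, the export call it wraps looks roughly like the sketch below; a hedged illustration, assuming a CUDA device, the `tensorrt` package, and the `yolo26n.pt` weights used elsewhere in this diff (the argument values are illustrative, not the test's parametrized matrix).

```python
from ultralytics import YOLO

# Export a detection checkpoint to a TensorRT engine, roughly what the
# matrix test above drives with different dynamic/int8/half/batch settings
model = YOLO("yolo26n.pt")
engine_path = model.export(format="engine", imgsz=640, half=True, dynamic=True, batch=2)

# Reload the exported engine and run inference on a sample image
results = YOLO(engine_path)("https://ultralytics.com/images/bus.jpg")
```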
@@ -126,7 +132,7 @@ def test_train():
 @pytest.mark.skipif(not DEVICES, reason="No CUDA devices available")
 def test_predict_multiple_devices():
     """Validate model prediction consistency across CPU and CUDA devices."""
-    model = YOLO("
+    model = YOLO("yolo26n.pt")

     # Test CPU
     model = model.cpu()
{ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/tests/test_engine.py
RENAMED

@@ -23,13 +23,13 @@ def test_export():
     exporter = Exporter()
     exporter.add_callback("on_export_start", test_func)
     assert test_func in exporter.callbacks["on_export_start"], "callback test failed"
-    f = exporter(model=YOLO("
+    f = exporter(model=YOLO("yolo26n.yaml").model)
     YOLO(f)(ASSETS)  # exported model inference


 def test_detect():
     """Test YOLO object detection training, validation, and prediction functionality."""
-    overrides = {"data": "coco8.yaml", "model": "
+    overrides = {"data": "coco8.yaml", "model": "yolo26n.yaml", "imgsz": 32, "epochs": 1, "save": False}
     cfg = get_cfg(DEFAULT_CFG)
     cfg.data = "coco8.yaml"
     cfg.imgsz = 32

@@ -71,7 +71,7 @@ def test_segment():
     """Test image segmentation training, validation, and prediction pipelines using YOLO models."""
     overrides = {
         "data": "coco8-seg.yaml",
-        "model": "
+        "model": "yolo26n-seg.yaml",
         "imgsz": 32,
         "epochs": 1,
         "save": False,

@@ -98,7 +98,7 @@ def test_segment():
     pred = segment.SegmentationPredictor(overrides={"imgsz": [64, 64]})
     pred.add_callback("on_predict_start", test_func)
     assert test_func in pred.callbacks["on_predict_start"], "callback test failed"
-    result = pred(source=ASSETS, model=WEIGHTS_DIR / "
+    result = pred(source=ASSETS, model=WEIGHTS_DIR / "yolo26n-seg.pt")
     assert len(result), "predictor test failed"

     # Test resume functionality

@@ -115,7 +115,7 @@ def test_segment():

 def test_classify():
     """Test image classification including training, validation, and prediction phases."""
-    overrides = {"data": "imagenet10", "model": "
+    overrides = {"data": "imagenet10", "model": "yolo26n-cls.yaml", "imgsz": 32, "epochs": 1, "save": False}
     cfg = get_cfg(DEFAULT_CFG)
     cfg.data = "imagenet10"
     cfg.imgsz = 32

@@ -150,7 +150,7 @@ def test_nan_recovery():
         trainer.tloss *= torch.tensor(float("nan"))
         nan_injected[0] = True

-    overrides = {"data": "coco8.yaml", "model": "
+    overrides = {"data": "coco8.yaml", "model": "yolo26n.yaml", "imgsz": 32, "epochs": 3}
     trainer = detect.DetectionTrainer(overrides=overrides)
     trainer.add_callback("on_train_batch_end", inject_nan)
     trainer.train()
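As a side note, the trainer and callback pattern this NaN-recovery test relies on is general; a minimal sketch, assuming the `coco8.yaml` dataset and `yolo26n.yaml` config referenced in the hunk above (the callback body is illustrative):

```python
from ultralytics.models.yolo import detect


def log_batch(trainer):
    # Illustrative callback: print the running loss tensor after each training batch
    print(f"epoch {trainer.epoch}: tloss {trainer.tloss}")


# Same DetectionTrainer + add_callback pattern exercised by test_nan_recovery above
overrides = {"data": "coco8.yaml", "model": "yolo26n.yaml", "imgsz": 32, "epochs": 1}
trainer = detect.DetectionTrainer(overrides=overrides)
trainer.add_callback("on_train_batch_end", log_batch)
trainer.train()
```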
{ultralytics_opencv_headless-8.3.248 → ultralytics_opencv_headless-8.4.0}/tests/test_exports.py
RENAMED

@@ -12,8 +12,8 @@ import pytest
 from tests import MODEL, SOURCE
 from ultralytics import YOLO
 from ultralytics.cfg import TASK2DATA, TASK2MODEL, TASKS
-from ultralytics.utils import ARM64, IS_RASPBERRYPI, LINUX, MACOS, WINDOWS, checks
-from ultralytics.utils.torch_utils import TORCH_1_10, TORCH_1_11, TORCH_1_13, TORCH_2_1, TORCH_2_8, TORCH_2_9
+from ultralytics.utils import ARM64, IS_RASPBERRYPI, LINUX, MACOS, MACOS_VERSION, WINDOWS, checks
+from ultralytics.utils.torch_utils import TORCH_1_10, TORCH_1_11, TORCH_1_13, TORCH_2_0, TORCH_2_1, TORCH_2_8, TORCH_2_9


 def test_export_torchscript():

@@ -112,6 +112,9 @@ def test_export_torchscript_matrix(task, dynamic, int8, half, batch, nms):
 @pytest.mark.skipif(not MACOS, reason="CoreML inference only supported on macOS")
 @pytest.mark.skipif(not TORCH_1_11, reason="CoreML export requires torch>=1.11")
 @pytest.mark.skipif(checks.IS_PYTHON_3_13, reason="CoreML not supported in Python 3.13")
+@pytest.mark.skipif(
+    MACOS and MACOS_VERSION and MACOS_VERSION >= "15", reason="CoreML YOLO26 matrix test crashes on macOS 15+"
+)
 @pytest.mark.parametrize(
     "task, dynamic, int8, half, nms, batch",
     [  # generate all combinations except for exclusion cases

@@ -235,6 +238,8 @@ def test_export_mnn_matrix(task, int8, half, batch):


 @pytest.mark.slow
+@pytest.mark.skipif(ARM64, reason="NCNN not supported on ARM64")  # https://github.com/Tencent/ncnn/issues/6509
+@pytest.mark.skipif(not TORCH_2_0, reason="NCNN inference causes segfault on PyTorch<2.0")
 def test_export_ncnn():
     """Test YOLO export to NCNN format."""
     file = YOLO(MODEL).export(format="ncnn", imgsz=32)

@@ -242,6 +247,8 @@ def test_export_ncnn():


 @pytest.mark.slow
+@pytest.mark.skipif(ARM64, reason="NCNN not supported on ARM64")  # https://github.com/Tencent/ncnn/issues/6509
+@pytest.mark.skipif(not TORCH_2_0, reason="NCNN inference causes segfault on PyTorch<2.0")
 @pytest.mark.parametrize("task, half, batch", list(product(TASKS, [True, False], [1])))
 def test_export_ncnn_matrix(task, half, batch):
     """Test YOLO export to NCNN format considering various export configurations."""

@@ -256,7 +263,7 @@ def test_export_ncnn_matrix(task, half, batch):
 @pytest.mark.skipif(ARM64, reason="IMX export is not supported on ARM64 architectures.")
 def test_export_imx():
     """Test YOLO export to IMX format."""
-    model = YOLO(
+    model = YOLO("yolo11n.pt")  # IMX export only supports YOLO11
     file = model.export(format="imx", imgsz=32)
     YOLO(file)(SOURCE, imgsz=32)