ultralytics-opencv-headless 8.4.2__tar.gz → 8.4.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/PKG-INFO +36 -36
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/README.md +35 -35
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/__init__.py +1 -1
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/__init__.py +18 -18
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/annotator.py +2 -2
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/converter.py +9 -9
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/engine/exporter.py +22 -22
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/engine/model.py +33 -33
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/engine/predictor.py +17 -17
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/engine/results.py +2 -9
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/engine/trainer.py +19 -12
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/engine/tuner.py +4 -4
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/engine/validator.py +16 -16
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/classify/predict.py +1 -1
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/classify/train.py +1 -1
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/classify/val.py +1 -1
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/detect/predict.py +2 -2
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/detect/train.py +1 -1
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/detect/val.py +1 -1
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/model.py +7 -7
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/obb/predict.py +1 -1
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/obb/train.py +2 -2
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/obb/val.py +1 -1
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/pose/predict.py +1 -1
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/pose/train.py +4 -2
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/pose/val.py +1 -1
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/segment/predict.py +2 -2
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/segment/train.py +3 -3
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/segment/val.py +1 -1
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/autobackend.py +2 -2
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/modules/head.py +1 -1
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/tasks.py +12 -12
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/ai_gym.py +3 -3
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/config.py +1 -1
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/heatmap.py +1 -1
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/instance_segmentation.py +2 -2
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/parking_management.py +1 -1
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/solutions.py +2 -2
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/track.py +1 -1
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/__init__.py +8 -8
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/benchmarks.py +23 -23
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/platform.py +11 -9
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/checks.py +6 -6
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/downloads.py +2 -2
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/export/imx.py +3 -8
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/files.py +2 -2
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/loss.py +3 -3
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/tuner.py +2 -2
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics_opencv_headless.egg-info/PKG-INFO +36 -36
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/LICENSE +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/pyproject.toml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/setup.cfg +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/tests/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/tests/conftest.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/tests/test_cli.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/tests/test_cuda.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/tests/test_engine.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/tests/test_exports.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/tests/test_integrations.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/tests/test_python.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/tests/test_solutions.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/assets/bus.jpg +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/assets/zidane.jpg +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/Argoverse.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/DOTAv1.5.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/DOTAv1.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/GlobalWheat2020.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/HomeObjects-3K.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/ImageNet.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/Objects365.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/SKU-110K.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/TT100K.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/VOC.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/VisDrone.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/african-wildlife.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/brain-tumor.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/carparts-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco128-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco128.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco8-grayscale.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco8-multispectral.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco8-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco8-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/coco8.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/construction-ppe.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/crack-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/dog-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/dota8-multispectral.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/dota8.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/hand-keypoints.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/kitti.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/lvis.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/medical-pills.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/open-images-v7.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/package-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/signature.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/tiger-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/datasets/xView.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/default.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/11/yolo11-cls-resnet18.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/11/yolo11-cls.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/11/yolo11-obb.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/11/yolo11-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/11/yolo11-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/11/yolo11.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/11/yoloe-11-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/11/yoloe-11.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/12/yolo12-cls.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/12/yolo12-obb.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/12/yolo12-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/12/yolo12-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/12/yolo12.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yolo26-cls.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yolo26-obb.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yolo26-p2.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yolo26-p6.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yolo26-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yolo26-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yolo26.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yoloe-26-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/26/yoloe-26.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/rt-detr/rtdetr-l.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/rt-detr/rtdetr-resnet101.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/rt-detr/rtdetr-resnet50.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/rt-detr/rtdetr-x.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v10/yolov10b.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v10/yolov10l.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v10/yolov10m.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v10/yolov10n.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v10/yolov10s.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v10/yolov10x.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v3/yolov3-spp.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v3/yolov3-tiny.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v3/yolov3.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v5/yolov5-p6.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v5/yolov5.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v6/yolov6.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yoloe-v8-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yoloe-v8.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-cls-resnet101.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-cls-resnet50.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-cls.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-ghost-p2.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-ghost-p6.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-ghost.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-obb.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-p2.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-p6.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-pose-p6.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-rtdetr.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-seg-p6.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-world.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8-worldv2.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v8/yolov8.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v9/yolov9c-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v9/yolov9c.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v9/yolov9e-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v9/yolov9e.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v9/yolov9m.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v9/yolov9s.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/models/v9/yolov9t.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/trackers/botsort.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/trackers/bytetrack.yaml +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/augment.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/base.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/build.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/dataset.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/loaders.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/scripts/download_weights.sh +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/scripts/get_coco.sh +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/scripts/get_coco128.sh +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/scripts/get_imagenet.sh +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/split.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/split_dota.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/data/utils.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/engine/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/hub/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/hub/auth.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/hub/google/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/hub/session.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/hub/utils.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/fastsam/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/fastsam/model.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/fastsam/predict.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/fastsam/utils.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/fastsam/val.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/nas/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/nas/model.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/nas/predict.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/nas/val.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/rtdetr/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/rtdetr/model.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/rtdetr/predict.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/rtdetr/train.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/rtdetr/val.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/amg.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/build.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/build_sam3.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/model.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/blocks.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/decoders.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/encoders.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/memory_attention.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/sam.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/tiny_encoder.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/transformer.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/modules/utils.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/predict.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/decoder.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/encoder.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/geometry_encoders.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/maskformer_segmentation.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/model_misc.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/necks.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/sam3_image.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/text_encoder_ve.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/vitdet.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/sam/sam3/vl_combiner.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/utils/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/utils/loss.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/utils/ops.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/classify/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/detect/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/obb/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/pose/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/segment/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/world/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/world/train.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/world/train_world.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/yoloe/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/yoloe/predict.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/yoloe/train.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/yoloe/train_seg.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/models/yolo/yoloe/val.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/modules/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/modules/activation.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/modules/block.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/modules/conv.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/modules/transformer.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/modules/utils.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/nn/text_model.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/optim/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/optim/muon.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/py.typed +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/analytics.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/distance_calculation.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/object_blurrer.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/object_counter.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/object_cropper.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/queue_management.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/region_counter.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/security_alarm.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/similarity_search.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/speed_estimation.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/streamlit_inference.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/templates/similarity-search.html +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/trackzone.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/solutions/vision_eye.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/basetrack.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/bot_sort.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/byte_tracker.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/utils/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/utils/gmc.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/utils/kalman_filter.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/trackers/utils/matching.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/autobatch.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/autodevice.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/base.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/clearml.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/comet.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/dvc.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/hub.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/mlflow.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/neptune.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/raytune.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/tensorboard.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/callbacks/wb.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/cpu.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/dist.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/errors.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/events.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/export/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/export/engine.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/export/tensorflow.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/git.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/instance.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/logger.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/metrics.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/nms.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/ops.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/patches.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/plotting.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/tal.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/torch_utils.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/tqdm.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/utils/triton.py +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics_opencv_headless.egg-info/SOURCES.txt +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics_opencv_headless.egg-info/dependency_links.txt +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics_opencv_headless.egg-info/entry_points.txt +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics_opencv_headless.egg-info/requires.txt +0 -0
- {ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics_opencv_headless.egg-info/top_level.txt +0 -0
{ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ultralytics-opencv-headless
-Version: 8.4.2
+Version: 8.4.3
 Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -218,13 +218,13 @@ Ultralytics supports a wide range of YOLO models, from early versions like [YOLO

 Explore the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples. These models are trained on the [COCO dataset](https://cocodataset.org/), featuring 80 object classes.

-| Model | size<br><sup>(pixels) | mAP<sup>val<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
-| ------------------------------------------------------------------------------------ |
-| [YOLO26n](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n.pt) | 640
-| [YOLO26s](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s.pt) | 640
-| [YOLO26m](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m.pt) | 640
-| [YOLO26l](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l.pt) | 640
-| [YOLO26x](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x.pt) | 640
+| Model | size<br><sup>(pixels)</sup> | mAP<sup>val<br>50-95</sup> | mAP<sup>val<br>50-95(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+| ------------------------------------------------------------------------------------ | --------------------------- | -------------------------- | ------------------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ----------------------- |
+| [YOLO26n](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n.pt) | 640 | 40.9 | 40.1 | 38.9 ± 0.7 | 1.7 ± 0.0 | 2.4 | 5.4 |
+| [YOLO26s](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s.pt) | 640 | 48.6 | 47.8 | 87.2 ± 0.9 | 2.5 ± 0.0 | 9.5 | 20.7 |
+| [YOLO26m](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m.pt) | 640 | 53.1 | 52.5 | 220.0 ± 1.4 | 4.7 ± 0.1 | 20.4 | 68.2 |
+| [YOLO26l](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l.pt) | 640 | 55.0 | 54.4 | 286.2 ± 2.0 | 6.2 ± 0.2 | 24.8 | 86.4 |
+| [YOLO26x](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x.pt) | 640 | 57.5 | 56.9 | 525.8 ± 4.0 | 11.8 ± 0.2 | 55.7 | 193.9 |

 - **mAP<sup>val</sup>** values refer to single-model single-scale performance on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val detect data=coco.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val detect data=coco.yaml batch=1 device=0|cpu`
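The `Reproduce with ...` notes in these tables use the `yolo` CLI; the same validation runs through the Python API. A minimal sketch, assuming the `yolo26n.pt` checkpoint linked in the table above is available (Ultralytics fetches it on first use):

```python
# Sketch: Python equivalent of `yolo val detect data=coco.yaml device=0`.
from ultralytics import YOLO

model = YOLO("yolo26n.pt")  # detection checkpoint from the table above
metrics = model.val(data="coco.yaml", device=0)  # validate on COCO val2017
print(metrics.box.map)  # mAP 50-95, the value reported in the table
```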
@@ -235,13 +235,13 @@ Explore the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usa

 Refer to the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage examples. These models are trained on [COCO-Seg](https://docs.ultralytics.com/datasets/segment/coco/), including 80 classes.

-| Model | size<br><sup>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mask<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
-| -------------------------------------------------------------------------------------------- |
-| [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640
-| [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640
-| [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640
-| [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640
-| [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640
+| Model | size<br><sup>(pixels)</sup> | mAP<sup>box<br>50-95(e2e)</sup> | mAP<sup>mask<br>50-95(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+| -------------------------------------------------------------------------------------------- | --------------------------- | ------------------------------- | -------------------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ----------------------- |
+| [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640 | 39.6 | 33.9 | 53.3 ± 0.5 | 2.1 ± 0.0 | 2.7 | 9.1 |
+| [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640 | 47.3 | 40.0 | 118.4 ± 0.9 | 3.3 ± 0.0 | 10.4 | 34.2 |
+| [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640 | 52.5 | 44.1 | 328.2 ± 2.4 | 6.7 ± 0.1 | 23.6 | 121.5 |
+| [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640 | 54.4 | 45.5 | 387.0 ± 3.7 | 8.0 ± 0.1 | 28.0 | 139.8 |
+| [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640 | 56.5 | 47.0 | 787.0 ± 6.8 | 16.4 ± 0.1 | 62.8 | 313.5 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val segment data=coco.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val segment data=coco.yaml batch=1 device=0|cpu`
@@ -252,13 +252,13 @@ Refer to the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) fo

 Consult the [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usage examples. These models are trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), covering 1000 classes.

-| Model | size<br><sup>(pixels) | acc<br><sup>top1 | acc<br><sup>top5 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) at 224 |
-| -------------------------------------------------------------------------------------------- |
-| [YOLO26n-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-cls.pt) | 224
-| [YOLO26s-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-cls.pt) | 224
-| [YOLO26m-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-cls.pt) | 224
-| [YOLO26l-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-cls.pt) | 224
-| [YOLO26x-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-cls.pt) | 224
+| Model | size<br><sup>(pixels)</sup> | acc<br><sup>top1</sup> | acc<br><sup>top5</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B) at 224</sup> |
+| -------------------------------------------------------------------------------------------- | --------------------------- | ---------------------- | ---------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ------------------------------ |
+| [YOLO26n-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-cls.pt) | 224 | 71.4 | 90.1 | 5.0 ± 0.3 | 1.1 ± 0.0 | 2.8 | 0.5 |
+| [YOLO26s-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-cls.pt) | 224 | 76.0 | 92.9 | 7.9 ± 0.2 | 1.3 ± 0.0 | 6.7 | 1.6 |
+| [YOLO26m-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-cls.pt) | 224 | 78.1 | 94.2 | 17.2 ± 0.4 | 2.0 ± 0.0 | 11.6 | 4.9 |
+| [YOLO26l-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-cls.pt) | 224 | 79.0 | 94.6 | 23.2 ± 0.3 | 2.8 ± 0.0 | 14.1 | 6.2 |
+| [YOLO26x-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-cls.pt) | 224 | 79.9 | 95.0 | 41.4 ± 0.9 | 3.8 ± 0.0 | 29.6 | 13.6 |

 - **acc** values represent model accuracy on the [ImageNet](https://www.image-net.org/) dataset validation set. <br>Reproduce with `yolo val classify data=path/to/ImageNet device=0`
 - **Speed** metrics are averaged over ImageNet val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu`
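The classification rows report top-1/top-5 accuracy rather than mAP; a minimal sketch of the matching Python call, assuming a local ImageNet copy (`path/to/ImageNet` is the placeholder from the note above):

```python
# Sketch: Python equivalent of `yolo val classify data=path/to/ImageNet device=0`.
from ultralytics import YOLO

model = YOLO("yolo26n-cls.pt")  # classification checkpoint from the table above
metrics = model.val(data="path/to/ImageNet", device=0)
print(metrics.top1, metrics.top5)  # acc top1 / top5, as tabulated
```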
@@ -269,13 +269,13 @@ Consult the [Classification Docs](https://docs.ultralytics.com/tasks/classify/)

 See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples. These models are trained on [COCO-Pose](https://docs.ultralytics.com/datasets/pose/coco/), focusing on the 'person' class.

-| Model | size<br><sup>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
-| ---------------------------------------------------------------------------------------------- |
-| [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640
-| [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640
-| [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640
-| [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640
-| [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640
+| Model | size<br><sup>(pixels)</sup> | mAP<sup>pose<br>50-95(e2e)</sup> | mAP<sup>pose<br>50(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+| ---------------------------------------------------------------------------------------------- | --------------------------- | -------------------------------- | ----------------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ----------------------- |
+| [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640 | 57.2 | 83.3 | 40.3 ± 0.5 | 1.8 ± 0.0 | 2.9 | 7.5 |
+| [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640 | 63.0 | 86.6 | 85.3 ± 0.9 | 2.7 ± 0.0 | 10.4 | 23.9 |
+| [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640 | 68.8 | 89.6 | 218.0 ± 1.5 | 5.0 ± 0.1 | 21.5 | 73.1 |
+| [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640 | 70.4 | 90.5 | 275.4 ± 2.4 | 6.5 ± 0.1 | 25.9 | 91.3 |
+| [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640 | 71.6 | 91.6 | 565.4 ± 3.0 | 12.2 ± 0.2 | 57.6 | 201.7 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO Keypoints val2017](https://docs.ultralytics.com/datasets/pose/coco/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val pose data=coco-pose.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu`
@@ -286,13 +286,13 @@ See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usa

 Check the [OBB Docs](https://docs.ultralytics.com/tasks/obb/) for usage examples. These models are trained on [DOTAv1](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10/), including 15 classes.

-| Model | size<br><sup>(pixels) | mAP<sup>test<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
-| -------------------------------------------------------------------------------------------- |
-| [YOLO26n-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-obb.pt) | 1024
-| [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024
-| [YOLO26m-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-obb.pt) | 1024
-| [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024
-| [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024
+| Model | size<br><sup>(pixels)</sup> | mAP<sup>test<br>50-95(e2e)</sup> | mAP<sup>test<br>50(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+| -------------------------------------------------------------------------------------------- | --------------------------- | -------------------------------- | ----------------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ----------------------- |
+| [YOLO26n-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-obb.pt) | 1024 | 52.4 | 78.9 | 97.7 ± 0.9 | 2.8 ± 0.0 | 2.5 | 14.0 |
+| [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024 | 54.8 | 80.9 | 218.0 ± 1.4 | 4.9 ± 0.1 | 9.8 | 55.1 |
+| [YOLO26m-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-obb.pt) | 1024 | 55.3 | 81.0 | 579.2 ± 3.8 | 10.2 ± 0.3 | 21.2 | 183.3 |
+| [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024 | 56.2 | 81.6 | 735.6 ± 3.1 | 13.0 ± 0.2 | 25.6 | 230.0 |
+| [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024 | 56.7 | 81.7 | 1485.7 ± 11.5 | 30.5 ± 0.9 | 57.6 | 516.5 |

 - **mAP<sup>test</sup>** values are for single-model multiscale performance on the [DOTAv1 test set](https://captain-whu.github.io/DOTA/dataset.html). <br>Reproduce by `yolo val obb data=DOTAv1.yaml device=0 split=test` and submit merged results to the [DOTA evaluation server](https://captain-whu.github.io/DOTA/evaluation.html).
 - **Speed** metrics are averaged over [DOTAv1 val images](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10) using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce by `yolo val obb data=DOTAv1.yaml batch=1 device=0|cpu`
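The OBB table is the only one whose reproduce command validates on the unlabeled DOTAv1 test split, with merged results submitted to the evaluation server. A minimal Python sketch of that command, assuming the `yolo26n-obb.pt` checkpoint linked above and a local DOTAv1 dataset:

```python
# Sketch: Python equivalent of `yolo val obb data=DOTAv1.yaml device=0 split=test`.
# DOTAv1 test labels are not public, so predictions are saved locally and the
# merged results are submitted to the DOTA evaluation server, per the note above.
from ultralytics import YOLO

model = YOLO("yolo26n-obb.pt")  # OBB checkpoint from the table above
model.val(data="DOTAv1.yaml", split="test", device=0)
```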
{ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/README.md
RENAMED
@@ -129,13 +129,13 @@ Ultralytics supports a wide range of YOLO models, from early versions like [YOLO

Explore the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples. These models are trained on the [COCO dataset](https://cocodataset.org/), featuring 80 object classes.

- | Model | size<br><sup>(pixels) | mAP<sup>val<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
- | ------------------------------------------------------------------------------------ |
- | [YOLO26n](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n.pt) | 640
- | [YOLO26s](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s.pt) | 640
- | [YOLO26m](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m.pt) | 640
- | [YOLO26l](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l.pt) | 640
- | [YOLO26x](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x.pt) | 640
+ | Model | size<br><sup>(pixels)</sup> | mAP<sup>val<br>50-95</sup> | mAP<sup>val<br>50-95(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+ | ------------------------------------------------------------------------------------ | --------------------------- | -------------------------- | ------------------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ----------------------- |
+ | [YOLO26n](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n.pt) | 640 | 40.9 | 40.1 | 38.9 ± 0.7 | 1.7 ± 0.0 | 2.4 | 5.4 |
+ | [YOLO26s](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s.pt) | 640 | 48.6 | 47.8 | 87.2 ± 0.9 | 2.5 ± 0.0 | 9.5 | 20.7 |
+ | [YOLO26m](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m.pt) | 640 | 53.1 | 52.5 | 220.0 ± 1.4 | 4.7 ± 0.1 | 20.4 | 68.2 |
+ | [YOLO26l](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l.pt) | 640 | 55.0 | 54.4 | 286.2 ± 2.0 | 6.2 ± 0.2 | 24.8 | 86.4 |
+ | [YOLO26x](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x.pt) | 640 | 57.5 | 56.9 | 525.8 ± 4.0 | 11.8 ± 0.2 | 55.7 | 193.9 |

- **mAP<sup>val</sup>** values refer to single-model single-scale performance on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val detect data=coco.yaml device=0`
- **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val detect data=coco.yaml batch=1 device=0|cpu`
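The reproduce commands above have a direct Python equivalent. A minimal sketch, assuming the `yolo26n.pt` weights linked in the table resolve through the usual Ultralytics assets download; `coco8.yaml` is the bundled 8-image COCO subset, handy for a quick check before committing to the full `coco.yaml` run:

```python
from ultralytics import YOLO

# Validate a YOLO26 detection model and read the headline metrics
model = YOLO("yolo26n.pt")
metrics = model.val(data="coco8.yaml", imgsz=640, batch=1)
print(metrics.box.map)   # mAP50-95
print(metrics.box.map50) # mAP50
```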
@@ -146,13 +146,13 @@ Explore the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usa

Refer to the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage examples. These models are trained on [COCO-Seg](https://docs.ultralytics.com/datasets/segment/coco/), including 80 classes.

- | Model | size<br><sup>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mask<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
- | -------------------------------------------------------------------------------------------- |
- | [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640
- | [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640
- | [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640
- | [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640
- | [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640
+ | Model | size<br><sup>(pixels)</sup> | mAP<sup>box<br>50-95(e2e)</sup> | mAP<sup>mask<br>50-95(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+ | -------------------------------------------------------------------------------------------- | --------------------------- | ------------------------------- | -------------------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ----------------------- |
+ | [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640 | 39.6 | 33.9 | 53.3 ± 0.5 | 2.1 ± 0.0 | 2.7 | 9.1 |
+ | [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640 | 47.3 | 40.0 | 118.4 ± 0.9 | 3.3 ± 0.0 | 10.4 | 34.2 |
+ | [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640 | 52.5 | 44.1 | 328.2 ± 2.4 | 6.7 ± 0.1 | 23.6 | 121.5 |
+ | [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640 | 54.4 | 45.5 | 387.0 ± 3.7 | 8.0 ± 0.1 | 28.0 | 139.8 |
+ | [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640 | 56.5 | 47.0 | 787.0 ± 6.8 | 16.4 ± 0.1 | 62.8 | 313.5 |

- **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val segment data=coco.yaml device=0`
- **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val segment data=coco.yaml batch=1 device=0|cpu`
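For completeness, a hedged Python sketch of segmentation inference with the table's weights (the sample image URL is a standard Ultralytics demo asset):

```python
from ultralytics import YOLO

# Run instance segmentation and inspect the predicted masks
model = YOLO("yolo26n-seg.pt")
results = model.predict("https://ultralytics.com/images/bus.jpg", imgsz=640)
for r in results:
    if r.masks is not None:
        print(len(r.masks), "instances segmented")
        print(r.masks.xy[0][:3])  # first three xy vertices of the first mask polygon
```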
@@ -163,13 +163,13 @@ Refer to the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) fo

Consult the [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usage examples. These models are trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), covering 1000 classes.

- | Model | size<br><sup>(pixels) | acc<br><sup>top1 | acc<br><sup>top5 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) at 224 |
- | -------------------------------------------------------------------------------------------- |
- | [YOLO26n-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-cls.pt) | 224
- | [YOLO26s-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-cls.pt) | 224
- | [YOLO26m-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-cls.pt) | 224
- | [YOLO26l-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-cls.pt) | 224
- | [YOLO26x-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-cls.pt) | 224
+ | Model | size<br><sup>(pixels)</sup> | acc<br><sup>top1</sup> | acc<br><sup>top5</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B) at 224</sup> |
+ | -------------------------------------------------------------------------------------------- | --------------------------- | ---------------------- | ---------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ------------------------------ |
+ | [YOLO26n-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-cls.pt) | 224 | 71.4 | 90.1 | 5.0 ± 0.3 | 1.1 ± 0.0 | 2.8 | 0.5 |
+ | [YOLO26s-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-cls.pt) | 224 | 76.0 | 92.9 | 7.9 ± 0.2 | 1.3 ± 0.0 | 6.7 | 1.6 |
+ | [YOLO26m-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-cls.pt) | 224 | 78.1 | 94.2 | 17.2 ± 0.4 | 2.0 ± 0.0 | 11.6 | 4.9 |
+ | [YOLO26l-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-cls.pt) | 224 | 79.0 | 94.6 | 23.2 ± 0.3 | 2.8 ± 0.0 | 14.1 | 6.2 |
+ | [YOLO26x-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-cls.pt) | 224 | 79.9 | 95.0 | 41.4 ± 0.9 | 3.8 ± 0.0 | 29.6 | 13.6 |

- **acc** values represent model accuracy on the [ImageNet](https://www.image-net.org/) dataset validation set. <br>Reproduce with `yolo val classify data=path/to/ImageNet device=0`
- **Speed** metrics are averaged over ImageNet val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu`
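A short Python counterpart for the classification models above (a sketch; assumes the `yolo26n-cls.pt` weights download like the other variants):

```python
from ultralytics import YOLO

# Classify an image and read top-1/top-5 results
model = YOLO("yolo26n-cls.pt")
r = model.predict("https://ultralytics.com/images/bus.jpg", imgsz=224)[0]
print(r.names[r.probs.top1], float(r.probs.top1conf))  # best class name and its confidence
print(r.probs.top5)                                    # indices of the five most likely classes
```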
@@ -180,13 +180,13 @@ Consult the [Classification Docs](https://docs.ultralytics.com/tasks/classify/)

See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples. These models are trained on [COCO-Pose](https://docs.ultralytics.com/datasets/pose/coco/), focusing on the 'person' class.

- | Model | size<br><sup>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
- | ---------------------------------------------------------------------------------------------- |
- | [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640
- | [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640
- | [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640
- | [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640
- | [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640
+ | Model | size<br><sup>(pixels)</sup> | mAP<sup>pose<br>50-95(e2e)</sup> | mAP<sup>pose<br>50(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+ | ---------------------------------------------------------------------------------------------- | --------------------------- | -------------------------------- | ----------------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ----------------------- |
+ | [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640 | 57.2 | 83.3 | 40.3 ± 0.5 | 1.8 ± 0.0 | 2.9 | 7.5 |
+ | [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640 | 63.0 | 86.6 | 85.3 ± 0.9 | 2.7 ± 0.0 | 10.4 | 23.9 |
+ | [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640 | 68.8 | 89.6 | 218.0 ± 1.5 | 5.0 ± 0.1 | 21.5 | 73.1 |
+ | [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640 | 70.4 | 90.5 | 275.4 ± 2.4 | 6.5 ± 0.1 | 25.9 | 91.3 |
+ | [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640 | 71.6 | 91.6 | 565.4 ± 3.0 | 12.2 ± 0.2 | 57.6 | 201.7 |

- **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO Keypoints val2017](https://docs.ultralytics.com/datasets/pose/coco/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val pose data=coco-pose.yaml device=0`
- **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu`
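The pose models expose keypoints through the same results API; a minimal sketch under the same weight-availability assumption:

```python
from ultralytics import YOLO

# Run pose estimation; COCO-Pose predicts 17 keypoints per person
model = YOLO("yolo26n-pose.pt")
for r in model.predict("https://ultralytics.com/images/bus.jpg"):
    if r.keypoints is not None:
        print(r.keypoints.xy.shape)  # (num_persons, 17, 2) pixel coordinates
```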
@@ -197,13 +197,13 @@ See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usa

Check the [OBB Docs](https://docs.ultralytics.com/tasks/obb/) for usage examples. These models are trained on [DOTAv1](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10/), including 15 classes.

- | Model | size<br><sup>(pixels) | mAP<sup>test<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
- | -------------------------------------------------------------------------------------------- |
- | [YOLO26n-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-obb.pt) | 1024
- | [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024
- | [YOLO26m-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-obb.pt) | 1024
- | [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024
- | [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024
+ | Model | size<br><sup>(pixels)</sup> | mAP<sup>test<br>50-95(e2e)</sup> | mAP<sup>test<br>50(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+ | -------------------------------------------------------------------------------------------- | --------------------------- | -------------------------------- | ----------------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ----------------------- |
+ | [YOLO26n-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-obb.pt) | 1024 | 52.4 | 78.9 | 97.7 ± 0.9 | 2.8 ± 0.0 | 2.5 | 14.0 |
+ | [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024 | 54.8 | 80.9 | 218.0 ± 1.4 | 4.9 ± 0.1 | 9.8 | 55.1 |
+ | [YOLO26m-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-obb.pt) | 1024 | 55.3 | 81.0 | 579.2 ± 3.8 | 10.2 ± 0.3 | 21.2 | 183.3 |
+ | [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024 | 56.2 | 81.6 | 735.6 ± 3.1 | 13.0 ± 0.2 | 25.6 | 230.0 |
+ | [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024 | 56.7 | 81.7 | 1485.7 ± 11.5 | 30.5 ± 0.9 | 57.6 | 516.5 |

- **mAP<sup>test</sup>** values are for single-model multiscale performance on the [DOTAv1 test set](https://captain-whu.github.io/DOTA/dataset.html). <br>Reproduce by `yolo val obb data=DOTAv1.yaml device=0 split=test` and submit merged results to the [DOTA evaluation server](https://captain-whu.github.io/DOTA/evaluation.html).
- **Speed** metrics are averaged over [DOTAv1 val images](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10) using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce by `yolo val obb data=DOTAv1.yaml batch=1 device=0|cpu`
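Oriented boxes come back as rotated quadrilaterals; a sketch (the boats image is a commonly used Ultralytics demo asset for OBB):

```python
from ultralytics import YOLO

# Run OBB inference on DOTA-style aerial imagery
model = YOLO("yolo26n-obb.pt")
for r in model.predict("https://ultralytics.com/images/boats.jpg", imgsz=1024):
    if r.obb is not None:
        print(r.obb.xyxyxyxy.shape)  # (n, 4, 2) corner points of each rotated box
```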
{ultralytics_opencv_headless-8.4.2 → ultralytics_opencv_headless-8.4.3}/ultralytics/cfg/__init__.py
RENAMED
@@ -90,13 +90,13 @@ SOLUTIONS_HELP_MSG = f"""
        yolo solutions count source="path/to/video.mp4" region="[(20, 400), (1080, 400), (1080, 360), (20, 360)]"

    2. Call heatmap solution
-        yolo solutions heatmap colormap=cv2.COLORMAP_PARULA model=
+        yolo solutions heatmap colormap=cv2.COLORMAP_PARULA model=yolo26n.pt

    3. Call queue management solution
-        yolo solutions queue region="[(20, 400), (1080, 400), (1080, 360), (20, 360)]" model=
+        yolo solutions queue region="[(20, 400), (1080, 400), (1080, 360), (20, 360)]" model=yolo26n.pt

    4. Call workout monitoring solution for push-ups
-        yolo solutions workout model=
+        yolo solutions workout model=yolo26n-pose.pt kpts=[6, 8, 10]

    5. Generate analytical graphs
        yolo solutions analytics analytics_type="pie"
@@ -118,16 +118,16 @@ CLI_HELP_MSG = f"""
    See all ARGS at https://docs.ultralytics.com/usage/cfg or with 'yolo cfg'

    1. Train a detection model for 10 epochs with an initial learning_rate of 0.01
-        yolo train data=coco8.yaml model=
+        yolo train data=coco8.yaml model=yolo26n.pt epochs=10 lr0=0.01

    2. Predict a YouTube video using a pretrained segmentation model at image size 320:
-        yolo predict model=
+        yolo predict model=yolo26n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320

    3. Validate a pretrained detection model at batch-size 1 and image size 640:
-        yolo val model=
+        yolo val model=yolo26n.pt data=coco8.yaml batch=1 imgsz=640

-    4. Export a
-        yolo export model=
+    4. Export a YOLO26n classification model to ONNX format at image size 224 by 128 (no TASK required)
+        yolo export model=yolo26n-cls.pt format=onnx imgsz=224,128

    5. Ultralytics solutions usage
        yolo solutions count or any of {list(SOLUTION_MAP.keys())[1:-1]} source="path/to/video.mp4"
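The four CLI examples in this help message map one-to-one onto Python API calls; a minimal sketch, assuming the same yolo26 weights:

```python
from ultralytics import YOLO

YOLO("yolo26n.pt").train(data="coco8.yaml", epochs=10, lr0=0.01)           # 1. train
YOLO("yolo26n-seg.pt").predict("https://youtu.be/LNwODJXcvt4", imgsz=320)  # 2. predict
YOLO("yolo26n.pt").val(data="coco8.yaml", batch=1, imgsz=640)              # 3. val
YOLO("yolo26n-cls.pt").export(format="onnx", imgsz=(224, 128))             # 4. export
```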
@@ -604,7 +604,7 @@ def handle_yolo_settings(args: list[str]) -> None:

    Examples:
        >>> handle_yolo_settings(["reset"])  # Reset YOLO settings
-        >>> handle_yolo_settings(["default_cfg_path=
+        >>> handle_yolo_settings(["default_cfg_path=yolo26n.yaml"])  # Update a specific setting

    Notes:
        - If no arguments are provided, the function will display the current settings.
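`handle_yolo_settings` backs the `yolo settings` command; the same persisted store is reachable from Python. A sketch using the documented `ultralytics.settings` object (`runs_dir` is one of its standard keys):

```python
from ultralytics import settings

print(settings["runs_dir"])                 # read one setting
settings.update({"runs_dir": "/tmp/runs"})  # change and persist it
settings.reset()                            # restore defaults, like `yolo settings reset`
```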
@@ -649,7 +649,7 @@ def handle_yolo_solutions(args: list[str]) -> None:
        >>> handle_yolo_solutions(["analytics", "conf=0.25", "source=path/to/video.mp4"])

        Run inference with custom configuration, requires Streamlit version 1.29.0 or higher.
-        >>> handle_yolo_solutions(["inference", "model=
+        >>> handle_yolo_solutions(["inference", "model=yolo26n.pt"])

    Notes:
        - Arguments can be provided in the format 'key=value' or as boolean flags
@@ -707,7 +707,7 @@ def handle_yolo_solutions(args: list[str]) -> None:
                str(ROOT / "solutions/streamlit_inference.py"),
                "--server.headless",
                "true",
-                overrides.pop("model", "
+                overrides.pop("model", "yolo26n.pt"),
            ]
        )
    else:
@@ -758,9 +758,9 @@ def parse_key_value_pair(pair: str = "key=value") -> tuple:
        AssertionError: If the value is missing or empty.

    Examples:
-        >>> key, value = parse_key_value_pair("model=
+        >>> key, value = parse_key_value_pair("model=yolo26n.pt")
        >>> print(f"Key: {key}, Value: {value}")
-        Key: model, Value:
+        Key: model, Value: yolo26n.pt

        >>> key, value = parse_key_value_pair("epochs=100")
        >>> print(f"Key: {key}, Value: {value}")
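To make the doctest's contract concrete, here is a standalone re-implementation of the same key=value split; `parse_pair` is a hypothetical stand-in, not the library function:

```python
def parse_pair(pair: str = "key=value") -> tuple[str, str]:
    """Split one 'key=value' CLI argument, asserting the value is present."""
    key, sep, value = pair.partition("=")  # split on the first '=' only
    assert sep and value.strip(), f"missing value for argument '{pair}'"
    return key.strip(), value.strip()


assert parse_pair("model=yolo26n.pt") == ("model", "yolo26n.pt")
assert parse_pair("epochs=100") == ("epochs", "100")
```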
@@ -832,13 +832,13 @@ def entrypoint(debug: str = "") -> None:

    Examples:
        Train a detection model for 10 epochs with an initial learning_rate of 0.01:
-        >>> entrypoint("train data=coco8.yaml model=
+        >>> entrypoint("train data=coco8.yaml model=yolo26n.pt epochs=10 lr0=0.01")

        Predict a YouTube video using a pretrained segmentation model at image size 320:
-        >>> entrypoint("predict model=
+        >>> entrypoint("predict model=yolo26n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320")

        Validate a pretrained detection model at batch-size 1 and image size 640:
-        >>> entrypoint("val model=
+        >>> entrypoint("val model=yolo26n.pt data=coco8.yaml batch=1 imgsz=640")

    Notes:
        - If no arguments are passed, the function will display the usage help message.
@@ -933,7 +933,7 @@ def entrypoint(debug: str = "") -> None:
    # Model
    model = overrides.pop("model", DEFAULT_CFG.model)
    if model is None:
-        model = "
+        model = "yolo26n.pt"
        LOGGER.warning(f"'model' argument is missing. Using default 'model={model}'.")
    overrides["model"] = model
    stem = Path(model).stem.lower()
@@ -1022,5 +1022,5 @@ def copy_default_cfg() -> None:


if __name__ == "__main__":
-    # Example: entrypoint(debug='yolo predict model=
+    # Example: entrypoint(debug='yolo predict model=yolo26n.pt')
    entrypoint(debug="")
@@ -9,7 +9,7 @@ from ultralytics import SAM, YOLO


def auto_annotate(
    data: str | Path,
-    det_model: str = "
+    det_model: str = "yolo26x.pt",
    sam_model: str = "sam_b.pt",
    device: str = "",
    conf: float = 0.25,
@@ -39,7 +39,7 @@ def auto_annotate(

    Examples:
        >>> from ultralytics.data.annotator import auto_annotate
-        >>> auto_annotate(data="ultralytics/assets", det_model="
+        >>> auto_annotate(data="ultralytics/assets", det_model="yolo26n.pt", sam_model="mobile_sam.pt")
    """
    det_model = YOLO(det_model)
    sam_model = SAM(sam_model)
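Filling in the remaining parameters from the signature above gives a fuller usage sketch (paths are placeholders; by default the labels land in an auto-created folder next to the images):

```python
from ultralytics.data.annotator import auto_annotate

# Detector proposes boxes, SAM turns them into segmentation labels
auto_annotate(
    data="path/to/images",   # folder of images to annotate
    det_model="yolo26x.pt",  # detection weights (this release's default)
    sam_model="sam_b.pt",    # SAM weights used for mask generation
    device="",               # "" auto-selects GPU if available
    conf=0.25,               # detection confidence threshold
)
```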
@@ -15,7 +15,7 @@ import numpy as np
from PIL import Image

from ultralytics.utils import ASSETS_URL, DATASETS_DIR, LOGGER, NUM_THREADS, TQDM, YAML
-from ultralytics.utils.checks import check_file
+from ultralytics.utils.checks import check_file
from ultralytics.utils.downloads import download, zip_directory
from ultralytics.utils.files import increment_path
@@ -747,7 +747,7 @@ def convert_to_multispectral(path: str | Path, n_channels: int = 10, replace: bo


async def convert_ndjson_to_yolo(ndjson_path: str | Path, output_path: str | Path | None = None) -> Path:
-    """Convert NDJSON dataset format to Ultralytics
+    """Convert NDJSON dataset format to Ultralytics YOLO dataset structure.

    This function converts datasets stored in NDJSON (Newline Delimited JSON) format to the standard YOLO format. For
    detection/segmentation/pose/obb tasks, it creates separate directories for images and labels. For classification
@@ -776,9 +776,11 @@ async def convert_ndjson_to_yolo(ndjson_path: str | Path, output_path: str | Pat

        Use with YOLO training
        >>> from ultralytics import YOLO
-        >>> model = YOLO("
+        >>> model = YOLO("yolo26n.pt")
        >>> model.train(data="https://github.com/ultralytics/assets/releases/download/v0.0.0/coco8-ndjson.ndjson")
    """
+    from ultralytics.utils.checks import check_requirements
+
    check_requirements("aiohttp")
    import aiohttp
@@ -835,21 +837,19 @@ async def convert_ndjson_to_yolo(ndjson_path: str | Path, output_path: str | Pat
        # Download image if URL provided and file doesn't exist
        if http_url := record.get("url"):
            if not image_path.exists():
-                image_path.parent.mkdir(parents=True, exist_ok=True)
+                image_path.parent.mkdir(parents=True, exist_ok=True)
                try:
                    async with session.get(http_url, timeout=aiohttp.ClientTimeout(total=30)) as response:
                        response.raise_for_status()
-
-                        async for chunk in response.content.iter_chunked(8192):
-                            f.write(chunk)
+                        image_path.write_bytes(await response.read())
                    return True
                except Exception as e:
                    LOGGER.warning(f"Failed to download {http_url}: {e}")
                    return False
        return True

-    # Process all images with async downloads
-    semaphore = asyncio.Semaphore(
+    # Process all images with async downloads (limit connections for small datasets)
+    semaphore = asyncio.Semaphore(min(128, len(image_records)))
    async with aiohttp.ClientSession() as session:
        pbar = TQDM(
            total=len(image_records),
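The hunk above throttles downloads with an `asyncio.Semaphore` sized to the dataset. A self-contained sketch of the same bounded-concurrency pattern (the URLs and the `download_all` helper are illustrative, not library code):

```python
import asyncio
from pathlib import Path

import aiohttp  # availability is checked via check_requirements("aiohttp") above


async def download_all(urls: list[str], dest: Path, limit: int = 128) -> None:
    """Fetch URLs concurrently while holding at most `limit` connections open."""
    semaphore = asyncio.Semaphore(min(limit, len(urls)) or 1)

    async def fetch(session: aiohttp.ClientSession, url: str) -> None:
        async with semaphore:  # acquired per request, released on exit
            async with session.get(url, timeout=aiohttp.ClientTimeout(total=30)) as response:
                response.raise_for_status()
                (dest / url.rsplit("/", 1)[-1]).write_bytes(await response.read())

    async with aiohttp.ClientSession() as session:
        await asyncio.gather(*(fetch(session, u) for u in urls))


# Example: asyncio.run(download_all(["https://example.com/a.jpg"], Path(".")))
```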
@@ -4,38 +4,38 @@ Export a YOLO PyTorch model to other formats. TensorFlow exports authored by htt

Format | `format=argument` | Model
--- | --- | ---
- PyTorch | - |
- TorchScript | `torchscript` |
- ONNX | `onnx` |
- OpenVINO | `openvino` |
- TensorRT | `engine` |
- CoreML | `coreml` |
- TensorFlow SavedModel | `saved_model` |
- TensorFlow GraphDef | `pb` |
- TensorFlow Lite | `tflite` |
- TensorFlow Edge TPU | `edgetpu` |
- TensorFlow.js | `tfjs` |
- PaddlePaddle | `paddle` |
- MNN | `mnn` |
- NCNN | `ncnn` |
- IMX | `imx` |
- RKNN | `rknn` |
- ExecuTorch | `executorch` |
- Axelera | `axelera` |
+ PyTorch | - | yolo26n.pt
+ TorchScript | `torchscript` | yolo26n.torchscript
+ ONNX | `onnx` | yolo26n.onnx
+ OpenVINO | `openvino` | yolo26n_openvino_model/
+ TensorRT | `engine` | yolo26n.engine
+ CoreML | `coreml` | yolo26n.mlpackage
+ TensorFlow SavedModel | `saved_model` | yolo26n_saved_model/
+ TensorFlow GraphDef | `pb` | yolo26n.pb
+ TensorFlow Lite | `tflite` | yolo26n.tflite
+ TensorFlow Edge TPU | `edgetpu` | yolo26n_edgetpu.tflite
+ TensorFlow.js | `tfjs` | yolo26n_web_model/
+ PaddlePaddle | `paddle` | yolo26n_paddle_model/
+ MNN | `mnn` | yolo26n.mnn
+ NCNN | `ncnn` | yolo26n_ncnn_model/
+ IMX | `imx` | yolo26n_imx_model/
+ RKNN | `rknn` | yolo26n_rknn_model/
+ ExecuTorch | `executorch` | yolo26n_executorch_model/
+ Axelera | `axelera` | yolo26n_axelera_model/

Requirements:
    $ pip install "ultralytics[export]"

Python:
    from ultralytics import YOLO
-    model = YOLO('
+    model = YOLO('yolo26n.pt')
    results = model.export(format='onnx')

CLI:
-    $ yolo mode=export model=
+    $ yolo mode=export model=yolo26n.pt format=onnx

Inference:
-    $ yolo predict model=
+    $ yolo predict model=yolo26n.pt  # PyTorch
    yolo11n.torchscript  # TorchScript
    yolo11n.onnx  # ONNX Runtime or OpenCV DNN with dnn=True
    yolo11n_openvino_model  # OpenVINO
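The header's Python snippet extends naturally to running the exported artifact back through the same API; a minimal sketch using the yolo26n names from the format table:

```python
from ultralytics import YOLO

model = YOLO("yolo26n.pt")
onnx_path = model.export(format="onnx")  # -> "yolo26n.onnx", per the table above
onnx_model = YOLO(onnx_path)             # exported formats load back transparently
results = onnx_model.predict("https://ultralytics.com/images/bus.jpg")
print(results[0].boxes.xyxy)             # detected boxes in pixel xyxy coordinates
```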
@@ -930,7 +930,7 @@ class Exporter:
            model = IOSDetectModel(self.model, self.im, mlprogram=not mlmodel) if self.args.nms else self.model
        else:
            if self.args.nms:
-                LOGGER.warning(f"{prefix} 'nms=True' is only available for Detect models like '
+                LOGGER.warning(f"{prefix} 'nms=True' is only available for Detect models like 'yolo26n.pt'.")
            # TODO CoreML Segment and Pose model pipelining
            model = self.model
        ts = torch.jit.trace(model.eval(), self.im, strict=False)  # TorchScript model