ultralytics-opencv-headless 8.4.0__tar.gz → 8.4.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ultralytics_opencv_headless-8.4.0/ultralytics_opencv_headless.egg-info → ultralytics_opencv_headless-8.4.2}/PKG-INFO +14 -14
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/README.md +13 -13
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/tests/test_engine.py +2 -2
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/tests/test_exports.py +3 -3
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/__init__.py +1 -1
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/__init__.py +1 -3
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yolo26-cls.yaml +2 -2
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yolo26-obb.yaml +2 -2
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yolo26-p2.yaml +11 -11
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yolo26-p6.yaml +8 -6
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yolo26-pose.yaml +2 -2
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yolo26-seg.yaml +2 -2
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yolo26.yaml +1 -1
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yoloe-26-seg.yaml +10 -10
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yoloe-26.yaml +10 -10
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/data/converter.py +49 -30
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/engine/exporter.py +3 -5
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/engine/results.py +19 -10
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/engine/trainer.py +8 -10
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/nn/autobackend.py +0 -4
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/nn/modules/block.py +1 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/nn/modules/head.py +5 -33
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/nn/tasks.py +2 -2
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/benchmarks.py +0 -1
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/export/tensorflow.py +40 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/loss.py +7 -4
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2/ultralytics_opencv_headless.egg-info}/PKG-INFO +14 -14
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/LICENSE +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/pyproject.toml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/setup.cfg +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/tests/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/tests/conftest.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/tests/test_cli.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/tests/test_cuda.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/tests/test_integrations.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/tests/test_python.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/tests/test_solutions.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/assets/bus.jpg +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/assets/zidane.jpg +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/Argoverse.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/DOTAv1.5.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/DOTAv1.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/GlobalWheat2020.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/HomeObjects-3K.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/ImageNet.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/Objects365.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/SKU-110K.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/TT100K.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/VOC.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/VisDrone.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/african-wildlife.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/brain-tumor.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/carparts-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/coco-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/coco.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/coco128-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/coco128.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/coco8-grayscale.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/coco8-multispectral.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/coco8-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/coco8-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/coco8.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/construction-ppe.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/crack-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/dog-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/dota8-multispectral.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/dota8.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/hand-keypoints.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/kitti.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/lvis.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/medical-pills.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/open-images-v7.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/package-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/signature.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/tiger-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/datasets/xView.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/default.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/11/yolo11-cls-resnet18.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/11/yolo11-cls.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/11/yolo11-obb.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/11/yolo11-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/11/yolo11-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/11/yolo11.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/11/yoloe-11-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/11/yoloe-11.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/12/yolo12-cls.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/12/yolo12-obb.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/12/yolo12-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/12/yolo12-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/12/yolo12.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/rt-detr/rtdetr-l.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/rt-detr/rtdetr-resnet101.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/rt-detr/rtdetr-resnet50.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/rt-detr/rtdetr-x.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v10/yolov10b.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v10/yolov10l.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v10/yolov10m.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v10/yolov10n.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v10/yolov10s.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v10/yolov10x.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v3/yolov3-spp.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v3/yolov3-tiny.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v3/yolov3.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v5/yolov5-p6.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v5/yolov5.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v6/yolov6.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yoloe-v8-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yoloe-v8.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8-cls-resnet101.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8-cls-resnet50.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8-cls.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8-ghost-p2.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8-ghost-p6.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8-ghost.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8-obb.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8-p2.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8-p6.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8-pose-p6.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8-pose.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8-rtdetr.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8-seg-p6.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8-world.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8-worldv2.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v8/yolov8.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v9/yolov9c-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v9/yolov9c.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v9/yolov9e-seg.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v9/yolov9e.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v9/yolov9m.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v9/yolov9s.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/v9/yolov9t.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/trackers/botsort.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/trackers/bytetrack.yaml +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/data/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/data/annotator.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/data/augment.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/data/base.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/data/build.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/data/dataset.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/data/loaders.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/data/scripts/download_weights.sh +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/data/scripts/get_coco.sh +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/data/scripts/get_coco128.sh +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/data/scripts/get_imagenet.sh +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/data/split.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/data/split_dota.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/data/utils.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/engine/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/engine/model.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/engine/predictor.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/engine/tuner.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/engine/validator.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/hub/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/hub/auth.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/hub/google/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/hub/session.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/hub/utils.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/fastsam/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/fastsam/model.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/fastsam/predict.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/fastsam/utils.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/fastsam/val.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/nas/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/nas/model.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/nas/predict.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/nas/val.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/rtdetr/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/rtdetr/model.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/rtdetr/predict.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/rtdetr/train.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/rtdetr/val.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/amg.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/build.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/build_sam3.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/model.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/modules/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/modules/blocks.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/modules/decoders.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/modules/encoders.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/modules/memory_attention.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/modules/sam.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/modules/tiny_encoder.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/modules/transformer.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/modules/utils.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/predict.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/sam3/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/sam3/decoder.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/sam3/encoder.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/sam3/geometry_encoders.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/sam3/maskformer_segmentation.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/sam3/model_misc.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/sam3/necks.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/sam3/sam3_image.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/sam3/text_encoder_ve.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/sam3/vitdet.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/sam/sam3/vl_combiner.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/utils/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/utils/loss.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/utils/ops.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/classify/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/classify/predict.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/classify/train.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/classify/val.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/detect/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/detect/predict.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/detect/train.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/detect/val.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/model.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/obb/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/obb/predict.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/obb/train.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/obb/val.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/pose/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/pose/predict.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/pose/train.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/pose/val.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/segment/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/segment/predict.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/segment/train.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/segment/val.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/world/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/world/train.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/world/train_world.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/yoloe/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/yoloe/predict.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/yoloe/train.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/yoloe/train_seg.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/models/yolo/yoloe/val.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/nn/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/nn/modules/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/nn/modules/activation.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/nn/modules/conv.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/nn/modules/transformer.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/nn/modules/utils.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/nn/text_model.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/optim/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/optim/muon.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/py.typed +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/ai_gym.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/analytics.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/config.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/distance_calculation.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/heatmap.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/instance_segmentation.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/object_blurrer.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/object_counter.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/object_cropper.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/parking_management.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/queue_management.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/region_counter.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/security_alarm.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/similarity_search.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/solutions.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/speed_estimation.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/streamlit_inference.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/templates/similarity-search.html +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/trackzone.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/solutions/vision_eye.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/trackers/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/trackers/basetrack.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/trackers/bot_sort.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/trackers/byte_tracker.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/trackers/track.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/trackers/utils/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/trackers/utils/gmc.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/trackers/utils/kalman_filter.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/trackers/utils/matching.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/autobatch.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/autodevice.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/callbacks/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/callbacks/base.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/callbacks/clearml.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/callbacks/comet.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/callbacks/dvc.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/callbacks/hub.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/callbacks/mlflow.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/callbacks/neptune.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/callbacks/platform.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/callbacks/raytune.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/callbacks/tensorboard.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/callbacks/wb.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/checks.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/cpu.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/dist.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/downloads.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/errors.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/events.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/export/__init__.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/export/engine.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/export/imx.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/files.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/git.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/instance.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/logger.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/metrics.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/nms.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/ops.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/patches.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/plotting.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/tal.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/torch_utils.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/tqdm.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/triton.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/utils/tuner.py +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics_opencv_headless.egg-info/SOURCES.txt +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics_opencv_headless.egg-info/dependency_links.txt +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics_opencv_headless.egg-info/entry_points.txt +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics_opencv_headless.egg-info/requires.txt +0 -0
- {ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics_opencv_headless.egg-info/top_level.txt +0 -0

{ultralytics_opencv_headless-8.4.0/ultralytics_opencv_headless.egg-info → ultralytics_opencv_headless-8.4.2}/PKG-INFO
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ultralytics-opencv-headless
-Version: 8.4.0
+Version: 8.4.2
 Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -237,11 +237,11 @@ Refer to the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) fo

 | Model | size<br><sup>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mask<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | -------------------------------------------------------------------------------------------- | --------------------- | -------------------- | --------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640 | 39.6 | 33.9 | 53.3 ± 0.5 | 2.1 ± 0.0 | 2.
-| [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640 | 47.3 | 40.0 | 118.4 ± 0.9 | 3.3 ± 0.0 | 10.
-| [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640 | 52.5 | 44.1 | 328.2 ± 2.4 | 6.7 ± 0.1 |
-| [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640 | 54.4 | 45.5 | 387.0 ± 3.7 | 8.0 ± 0.1 |
-| [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640 | 56.5 | 47.0 | 787.0 ± 6.8 | 16.4 ± 0.1 |
+| [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640 | 39.6 | 33.9 | 53.3 ± 0.5 | 2.1 ± 0.0 | 2.7 | 9.1 |
+| [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640 | 47.3 | 40.0 | 118.4 ± 0.9 | 3.3 ± 0.0 | 10.4 | 34.2 |
+| [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640 | 52.5 | 44.1 | 328.2 ± 2.4 | 6.7 ± 0.1 | 23.6 | 121.5 |
+| [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640 | 54.4 | 45.5 | 387.0 ± 3.7 | 8.0 ± 0.1 | 28.0 | 139.8 |
+| [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640 | 56.5 | 47.0 | 787.0 ± 6.8 | 16.4 ± 0.1 | 62.8 | 313.5 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val segment data=coco.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val segment data=coco.yaml batch=1 device=0|cpu`
@@ -271,11 +271,11 @@ See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usa

 | Model | size<br><sup>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | ---------------------------------------------------------------------------------------------- | --------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640 |
-| [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640 | 63.
-| [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640 | 68.8 | 89.
-| [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640 | 70.4 | 90.
-| [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640 | 71.
+| [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640 | 57.2 | 83.3 | 40.3 ± 0.5 | 1.8 ± 0.0 | 2.9 | 7.5 |
+| [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640 | 63.0 | 86.6 | 85.3 ± 0.9 | 2.7 ± 0.0 | 10.4 | 23.9 |
+| [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640 | 68.8 | 89.6 | 218.0 ± 1.5 | 5.0 ± 0.1 | 21.5 | 73.1 |
+| [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640 | 70.4 | 90.5 | 275.4 ± 2.4 | 6.5 ± 0.1 | 25.9 | 91.3 |
+| [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640 | 71.6 | 91.6 | 565.4 ± 3.0 | 12.2 ± 0.2 | 57.6 | 201.7 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO Keypoints val2017](https://docs.ultralytics.com/datasets/pose/coco/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val pose data=coco-pose.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu`
@@ -289,10 +289,10 @@ Check the [OBB Docs](https://docs.ultralytics.com/tasks/obb/) for usage examples
 | Model | size<br><sup>(pixels) | mAP<sup>test<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | -------------------------------------------------------------------------------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
 | [YOLO26n-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-obb.pt) | 1024 | 78.9 | 97.7 ± 0.9 | 2.8 ± 0.0 | 2.5 | 14.0 |
-| [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024 |
+| [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024 | 80.9 | 218.0 ± 1.4 | 4.9 ± 0.1 | 9.8 | 55.1 |
 | [YOLO26m-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-obb.pt) | 1024 | 81.0 | 579.2 ± 3.8 | 10.2 ± 0.3 | 21.2 | 183.3 |
-| [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024 | 81.
-| [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024 |
+| [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024 | 81.6 | 735.6 ± 3.1 | 13.0 ± 0.2 | 25.6 | 230.0 |
+| [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024 | 81.7 | 1485.7 ± 11.5 | 30.5 ± 0.9 | 57.6 | 516.5 |

 - **mAP<sup>test</sup>** values are for single-model multiscale performance on the [DOTAv1 test set](https://captain-whu.github.io/DOTA/dataset.html). <br>Reproduce by `yolo val obb data=DOTAv1.yaml device=0 split=test` and submit merged results to the [DOTA evaluation server](https://captain-whu.github.io/DOTA/evaluation.html).
 - **Speed** metrics are averaged over [DOTAv1 val images](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10) using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce by `yolo val obb data=DOTAv1.yaml batch=1 device=0|cpu`
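
The benchmark notes above give `yolo` CLI reproduce commands. For reference, a minimal Python-API sketch of the same validation call is shown below; it assumes a CUDA device 0 is available and that the yolo26n-seg.pt weights linked in the table can be downloaded, and the same pattern applies to the pose and OBB checkpoints.

```python
from ultralytics import YOLO

# Python equivalent of `yolo val segment data=coco.yaml device=0` from the notes above.
model = YOLO("yolo26n-seg.pt")  # any of the checkpoints linked in the tables
metrics = model.val(data="coco.yaml", device=0)
print(metrics.box.map, metrics.seg.map)  # box / mask mAP50-95

# CPU speed-style run, matching `yolo val segment data=coco.yaml batch=1 device=0|cpu`.
model.val(data="coco.yaml", batch=1, device="cpu")
```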

{ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/README.md
RENAMED
@@ -148,11 +148,11 @@ Refer to the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) fo

 | Model | size<br><sup>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mask<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | -------------------------------------------------------------------------------------------- | --------------------- | -------------------- | --------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640 | 39.6 | 33.9 | 53.3 ± 0.5 | 2.1 ± 0.0 | 2.
-| [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640 | 47.3 | 40.0 | 118.4 ± 0.9 | 3.3 ± 0.0 | 10.
-| [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640 | 52.5 | 44.1 | 328.2 ± 2.4 | 6.7 ± 0.1 |
-| [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640 | 54.4 | 45.5 | 387.0 ± 3.7 | 8.0 ± 0.1 |
-| [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640 | 56.5 | 47.0 | 787.0 ± 6.8 | 16.4 ± 0.1 |
+| [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640 | 39.6 | 33.9 | 53.3 ± 0.5 | 2.1 ± 0.0 | 2.7 | 9.1 |
+| [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640 | 47.3 | 40.0 | 118.4 ± 0.9 | 3.3 ± 0.0 | 10.4 | 34.2 |
+| [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640 | 52.5 | 44.1 | 328.2 ± 2.4 | 6.7 ± 0.1 | 23.6 | 121.5 |
+| [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640 | 54.4 | 45.5 | 387.0 ± 3.7 | 8.0 ± 0.1 | 28.0 | 139.8 |
+| [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640 | 56.5 | 47.0 | 787.0 ± 6.8 | 16.4 ± 0.1 | 62.8 | 313.5 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val segment data=coco.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val segment data=coco.yaml batch=1 device=0|cpu`
@@ -182,11 +182,11 @@ See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usa

 | Model | size<br><sup>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | ---------------------------------------------------------------------------------------------- | --------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640 |
-| [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640 | 63.
-| [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640 | 68.8 | 89.
-| [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640 | 70.4 | 90.
-| [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640 | 71.
+| [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640 | 57.2 | 83.3 | 40.3 ± 0.5 | 1.8 ± 0.0 | 2.9 | 7.5 |
+| [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640 | 63.0 | 86.6 | 85.3 ± 0.9 | 2.7 ± 0.0 | 10.4 | 23.9 |
+| [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640 | 68.8 | 89.6 | 218.0 ± 1.5 | 5.0 ± 0.1 | 21.5 | 73.1 |
+| [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640 | 70.4 | 90.5 | 275.4 ± 2.4 | 6.5 ± 0.1 | 25.9 | 91.3 |
+| [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640 | 71.6 | 91.6 | 565.4 ± 3.0 | 12.2 ± 0.2 | 57.6 | 201.7 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO Keypoints val2017](https://docs.ultralytics.com/datasets/pose/coco/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val pose data=coco-pose.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu`
@@ -200,10 +200,10 @@ Check the [OBB Docs](https://docs.ultralytics.com/tasks/obb/) for usage examples
 | Model | size<br><sup>(pixels) | mAP<sup>test<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | -------------------------------------------------------------------------------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
 | [YOLO26n-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-obb.pt) | 1024 | 78.9 | 97.7 ± 0.9 | 2.8 ± 0.0 | 2.5 | 14.0 |
-| [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024 |
+| [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024 | 80.9 | 218.0 ± 1.4 | 4.9 ± 0.1 | 9.8 | 55.1 |
 | [YOLO26m-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-obb.pt) | 1024 | 81.0 | 579.2 ± 3.8 | 10.2 ± 0.3 | 21.2 | 183.3 |
-| [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024 | 81.
-| [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024 |
+| [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024 | 81.6 | 735.6 ± 3.1 | 13.0 ± 0.2 | 25.6 | 230.0 |
+| [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024 | 81.7 | 1485.7 ± 11.5 | 30.5 ± 0.9 | 57.6 | 516.5 |

 - **mAP<sup>test</sup>** values are for single-model multiscale performance on the [DOTAv1 test set](https://captain-whu.github.io/DOTA/dataset.html). <br>Reproduce by `yolo val obb data=DOTAv1.yaml device=0 split=test` and submit merged results to the [DOTA evaluation server](https://captain-whu.github.io/DOTA/evaluation.html).
 - **Speed** metrics are averaged over [DOTAv1 val images](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10) using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce by `yolo val obb data=DOTAv1.yaml batch=1 device=0|cpu`

{ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/tests/test_engine.py
RENAMED
@@ -5,7 +5,7 @@ from unittest import mock

 import torch

-from tests import MODEL
+from tests import MODEL, SOURCE
 from ultralytics import YOLO
 from ultralytics.cfg import get_cfg
 from ultralytics.engine.exporter import Exporter
@@ -24,7 +24,7 @@ def test_export():
     exporter.add_callback("on_export_start", test_func)
     assert test_func in exporter.callbacks["on_export_start"], "callback test failed"
     f = exporter(model=YOLO("yolo26n.yaml").model)
-    YOLO(f)(
+    YOLO(f)(SOURCE)  # exported model inference


 def test_detect():
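
For context on the change above, the updated `test_export()` now runs inference with the exported artifact on a sample image (`SOURCE`). A hedged stand-alone sketch of that export-then-predict flow through the public ultralytics API is shown below; the export format and image URL here are illustrative and not necessarily what the test itself uses.

```python
from ultralytics import YOLO

# Build a model from its YAML config, export it, then reload the exported file
# and run a prediction on it - the flow the updated test_export() exercises.
model = YOLO("yolo26n.yaml")                         # untrained model built from config
exported_path = model.export(format="torchscript")   # returns the path of the exported file
results = YOLO(exported_path)("https://ultralytics.com/images/bus.jpg")  # illustrative source
print(results[0].boxes)
```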

{ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/tests/test_exports.py
RENAMED
@@ -144,7 +144,9 @@ def test_export_coreml_matrix(task, dynamic, int8, half, nms, batch):


 @pytest.mark.slow
-@pytest.mark.skipif(
+@pytest.mark.skipif(
+    not checks.IS_PYTHON_MINIMUM_3_10 or not TORCH_1_13, reason="TFLite export requires Python>=3.10 and torch>=1.13"
+)
 @pytest.mark.skipif(
     not LINUX or IS_RASPBERRYPI,
     reason="Test disabled as TF suffers from install conflicts on Windows, macOS and Raspberry Pi",
@@ -238,7 +240,6 @@ def test_export_mnn_matrix(task, int8, half, batch):


 @pytest.mark.slow
-@pytest.mark.skipif(ARM64, reason="NCNN not supported on ARM64")  # https://github.com/Tencent/ncnn/issues/6509
 @pytest.mark.skipif(not TORCH_2_0, reason="NCNN inference causes segfault on PyTorch<2.0")
 def test_export_ncnn():
     """Test YOLO export to NCNN format."""
@@ -247,7 +248,6 @@ def test_export_ncnn():


 @pytest.mark.slow
-@pytest.mark.skipif(ARM64, reason="NCNN not supported on ARM64")  # https://github.com/Tencent/ncnn/issues/6509
 @pytest.mark.skipif(not TORCH_2_0, reason="NCNN inference causes segfault on PyTorch<2.0")
 @pytest.mark.parametrize("task, half, batch", list(product(TASKS, [True, False], [1])))
 def test_export_ncnn_matrix(task, half, batch):
{ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/__init__.py
RENAMED
@@ -305,8 +305,6 @@ def get_cfg(
    # Merge overrides
    if overrides:
        overrides = cfg2dict(overrides)
-        if "save_dir" not in cfg:
-            overrides.pop("save_dir", None)  # special override keys to ignore
        check_dict_alignment(cfg, overrides)
        cfg = {**cfg, **overrides}  # merge cfg and overrides dicts (prefer overrides)

@@ -494,7 +492,7 @@ def check_dict_alignment(
    base_keys, custom_keys = (frozenset(x.keys()) for x in (base, custom))
    # Allow 'augmentations' as a valid custom parameter for custom Albumentations transforms
    if allowed_custom_keys is None:
-        allowed_custom_keys = {"augmentations"}
+        allowed_custom_keys = {"augmentations", "save_dir"}
    if mismatched := [k for k in custom_keys if k not in base_keys and k not in allowed_custom_keys]:
        from difflib import get_close_matches

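The net effect is that `save_dir` now passes `check_dict_alignment` as an allowed custom key instead of being stripped inside `get_cfg`. A small sketch of the calling pattern, with illustrative override values:

```python
from ultralytics.cfg import get_cfg

# save_dir is tolerated as an override key and survives the merge into the config namespace
cfg = get_cfg(overrides={"imgsz": 320, "save_dir": "runs/example"})
print(cfg.imgsz, getattr(cfg, "save_dir", None))
```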
{ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yolo26-cls.yaml
RENAMED

@@ -1,12 +1,12 @@
# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

-# Ultralytics YOLO26
+# Ultralytics YOLO26-cls image classification model
# Model docs: https://docs.ultralytics.com/models/yolo26
# Task docs: https://docs.ultralytics.com/tasks/classify

# Parameters
nc: 1000 # number of classes
-scales: # model compound scaling constants, i.e. 'model=
+scales: # model compound scaling constants, i.e. 'model=yolo26n-cls.yaml' will call yolo26-cls.yaml with scale 'n'
  # [depth, width, max_channels]
  n: [0.50, 0.25, 1024] # summary: 86 layers, 2,812,104 parameters, 2,812,104 gradients, 0.5 GFLOPs
  s: [0.50, 0.50, 1024] # summary: 86 layers, 6,724,008 parameters, 6,724,008 gradients, 1.6 GFLOPs
{ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yolo26-obb.yaml
RENAMED

@@ -1,6 +1,6 @@
# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

-# Ultralytics YOLO26
+# Ultralytics YOLO26-obb Oriented Bounding Boxes (OBB) model with P3/8 - P5/32 outputs
# Model docs: https://docs.ultralytics.com/models/yolo26
# Task docs: https://docs.ultralytics.com/tasks/obb

@@ -8,7 +8,7 @@
nc: 80 # number of classes
end2end: True # whether to use end-to-end mode
reg_max: 1 # DFL bins
-scales: # model compound scaling constants, i.e. 'model=
+scales: # model compound scaling constants, i.e. 'model=yolo26n-obb.yaml' will call yolo26-obb.yaml with scale 'n'
  # [depth, width, max_channels]
  n: [0.50, 0.25, 1024] # summary: 291 layers, 2,715,614 parameters, 2,715,614 gradients, 16.9 GFLOPs
  s: [0.50, 0.50, 1024] # summary: 291 layers, 10,582,142 parameters, 10,582,142 gradients, 63.5 GFLOPs
{ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yolo26-p2.yaml
RENAMED

@@ -1,6 +1,6 @@
# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

-# Ultralytics YOLO26 object detection model with
+# Ultralytics YOLO26 object detection model with P2/4 - P5/32 outputs
# Model docs: https://docs.ultralytics.com/models/yolo26
# Task docs: https://docs.ultralytics.com/tasks/detect

@@ -8,13 +8,13 @@
nc: 80 # number of classes
end2end: True # whether to use end-to-end mode
reg_max: 1 # DFL bins
-scales: # model compound scaling constants, i.e. 'model=
+scales: # model compound scaling constants, i.e. 'model=yolo26n-p2.yaml' will call yolo26-p2.yaml with scale 'n'
  # [depth, width, max_channels]
-  n: [0.50, 0.25, 1024] # summary:
-  s: [0.50, 0.50, 1024] # summary:
-  m: [0.50, 1.00, 512] # summary:
-  l: [1.00, 1.00, 512] # summary:
-  x: [1.00, 1.50, 512] # summary:
+  n: [0.50, 0.25, 1024] # summary: 329 layers, 2,662,400 parameters, 2,662,400 gradients, 9.5 GFLOPs
+  s: [0.50, 0.50, 1024] # summary: 329 layers, 9,765,856 parameters, 9,765,856 gradients, 27.8 GFLOPs
+  m: [0.50, 1.00, 512] # summary: 349 layers, 21,144,288 parameters, 21,144,288 gradients, 91.4 GFLOPs
+  l: [1.00, 1.00, 512] # summary: 489 layers, 25,815,520 parameters, 25,815,520 gradients, 115.3 GFLOPs
+  x: [1.00, 1.50, 512] # summary: 489 layers, 57,935,232 parameters, 57,935,232 gradients, 256.9 GFLOPs

# YOLO26n backbone
backbone:
@@ -42,12 +42,12 @@ head:
  - [-1, 2, C3k2, [256, True]] # 16 (P3/8-small)

  - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
-  - [[-1, 2], 1, Concat, [1]] # cat backbone
-  - [-1, 2, C3k2, [128, True]] # 19 (
+  - [[-1, 2], 1, Concat, [1]] # cat backbone P2
+  - [-1, 2, C3k2, [128, True]] # 19 (P2/4-xsmall)

  - [-1, 1, Conv, [128, 3, 2]]
  - [[-1, 16], 1, Concat, [1]] # cat head P3
-  - [-1, 2, C3k2, [256, True]] # 22 (
+  - [-1, 2, C3k2, [256, True]] # 22 (P3/8-small)

  - [-1, 1, Conv, [256, 3, 2]]
  - [[-1, 13], 1, Concat, [1]] # cat head P4
@@ -57,4 +57,4 @@ head:
  - [[-1, 10], 1, Concat, [1]] # cat head P5
  - [-1, 1, C3k2, [1024, True, 0.5, True]] # 28 (P5/32-large)

-  - [[19, 22, 25, 28], 1, Detect, [nc]] # Detect(P3, P4, P5)
+  - [[19, 22, 25, 28], 1, Detect, [nc]] # Detect(P2, P3, P4, P5)
{ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yolo26-p6.yaml
RENAMED

@@ -6,13 +6,15 @@

# Parameters
nc: 80 # number of classes
-
+end2end: True # whether to use end-to-end mode
+reg_max: 1 # DFL bins
+scales: # model compound scaling constants, i.e. 'model=yolo26n-p6.yaml' will call yolo26-p6.yaml with scale 'n'
  # [depth, width, max_channels]
-  n: [0.50, 0.25, 1024] # summary:
-  s: [0.50, 0.50, 1024] # summary:
-  m: [0.50, 1.00, 512] # summary:
-  l: [1.00, 1.00, 512] # summary:
-  x: [1.00, 1.50, 512] # summary:
+  n: [0.50, 0.25, 1024] # summary: 349 layers, 4,063,872 parameters, 4,063,872 gradients, 6.0 GFLOPs
+  s: [0.50, 0.50, 1024] # summary: 349 layers, 15,876,448 parameters, 15,876,448 gradients, 22.3 GFLOPs
+  m: [0.50, 1.00, 512] # summary: 369 layers, 32,400,096 parameters, 32,400,096 gradients, 77.3 GFLOPs
+  l: [1.00, 1.00, 512] # summary: 523 layers, 39,365,600 parameters, 39,365,600 gradients, 97.0 GFLOPs
+  x: [1.00, 1.50, 512] # summary: 523 layers, 88,330,368 parameters, 88,330,368 gradients, 216.6 GFLOPs

# YOLO26n backbone
backbone:
{ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yolo26-pose.yaml
RENAMED

@@ -1,6 +1,6 @@
# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

-# Ultralytics YOLO26
+# Ultralytics YOLO26-pose keypoints/pose estimation model with P3/8 - P5/32 outputs
# Model docs: https://docs.ultralytics.com/models/yolo26
# Task docs: https://docs.ultralytics.com/tasks/pose

@@ -9,7 +9,7 @@ nc: 80 # number of classes
end2end: True # whether to use end-to-end mode
reg_max: 1 # DFL bins
kpt_shape: [17, 3] # number of keypoints, number of dims (2 for x,y or 3 for x,y,visible)
-scales: # model compound scaling constants, i.e. 'model=
+scales: # model compound scaling constants, i.e. 'model=yolo26n-pose.yaml' will call yolo26-pose.yaml with scale 'n'
  # [depth, width, max_channels]
  n: [0.50, 0.25, 1024] # summary: 363 layers, 3,747,554 parameters, 3,747,554 gradients, 10.7 GFLOPs
  s: [0.50, 0.50, 1024] # summary: 363 layers, 11,870,498 parameters, 11,870,498 gradients, 29.6 GFLOPs
{ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yolo26-seg.yaml
RENAMED

@@ -1,6 +1,6 @@
# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

-# Ultralytics YOLO26
+# Ultralytics YOLO26-seg instance segmentation model with P3/8 - P5/32 outputs
# Model docs: https://docs.ultralytics.com/models/yolo26
# Task docs: https://docs.ultralytics.com/tasks/segment

@@ -8,7 +8,7 @@
nc: 80 # number of classes
end2end: True # whether to use end-to-end mode
reg_max: 1 # DFL bins
-scales: # model compound scaling constants, i.e. 'model=
+scales: # model compound scaling constants, i.e. 'model=yolo26n-seg.yaml' will call yolo26-seg.yaml with scale 'n'
  # [depth, width, max_channels]
  n: [0.50, 0.25, 1024] # summary: 309 layers, 3,126,280 parameters, 3,126,280 gradients, 10.5 GFLOPs
  s: [0.50, 0.50, 1024] # summary: 309 layers, 11,505,800 parameters, 11,505,800 gradients, 37.4 GFLOPs
{ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yolo26.yaml
RENAMED

@@ -8,7 +8,7 @@
nc: 80 # number of classes
end2end: True # whether to use end-to-end mode
reg_max: 1 # DFL bins
-scales: # model compound scaling constants, i.e. 'model=
+scales: # model compound scaling constants, i.e. 'model=yolo26n.yaml' will call yolo26.yaml with scale 'n'
  # [depth, width, max_channels]
  n: [0.50, 0.25, 1024] # summary: 260 layers, 2,572,280 parameters, 2,572,280 gradients, 6.1 GFLOPs
  s: [0.50, 0.50, 1024] # summary: 260 layers, 10,009,784 parameters, 10,009,784 gradients, 22.8 GFLOPs
{ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yoloe-26-seg.yaml
RENAMED

@@ -1,6 +1,6 @@
# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

-# Ultralytics
+# Ultralytics YOLOE-26 open-vocabulary instance segmentation model with P3/8 - P5/32 outputs
# Model docs: https://docs.ultralytics.com/models/yolo26
# Task docs: https://docs.ultralytics.com/tasks/segment

@@ -9,15 +9,15 @@ nc: 80 # number of classes
end2end: True # whether to use end-to-end mode
reg_max: 1 # DFL bins
text_model: mobileclip2:b
-scales: # model compound scaling constants, i.e. 'model=
+scales: # model compound scaling constants, i.e. 'model=yoloe-26n-seg.yaml' will call yoloe-26-seg.yaml with scale 'n'
  # [depth, width, max_channels]
-  n: [0.50, 0.25, 1024] # summary:
-  s: [0.50, 0.50, 1024] # summary:
-  m: [0.50, 1.00, 512] # summary:
-  l: [1.00, 1.00, 512] # summary:
-  x: [1.00, 1.50, 512] # summary:
+  n: [0.50, 0.25, 1024] # summary: 347 layers, 5,615,540 parameters, 5,615,540 gradients, 11.7 GFLOPs
+  s: [0.50, 0.50, 1024] # summary: 347 layers, 15,272,852 parameters, 15,272,852 gradients, 39.3 GFLOPs
+  m: [0.50, 1.00, 512] # summary: 367 layers, 34,922,132 parameters, 34,922,132 gradients, 136.3 GFLOPs
+  l: [1.00, 1.00, 512] # summary: 479 layers, 39,325,588 parameters, 39,325,588 gradients, 154.7 GFLOPs
+  x: [1.00, 1.50, 512] # summary: 479 layers, 85,397,684 parameters, 85,397,684 gradients, 343.3 GFLOPs

-#
+# YOLOE26n backbone
backbone:
  # [from, repeats, module, args]
  - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2
@@ -32,7 +32,7 @@ backbone:
  - [-1, 1, SPPF, [1024, 5, 3, True]] # 9
  - [-1, 2, C2PSA, [1024]] # 10

-#
+# YOLOE26n head
head:
  - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
  - [[-1, 6], 1, Concat, [1]] # cat backbone P4
@@ -50,4 +50,4 @@ head:
  - [[-1, 10], 1, Concat, [1]] # cat head P5
  - [-1, 1, C3k2, [1024, True, 0.5, True]] # 22 (P5/32-large)

-  - [[16, 19, 22], 1, YOLOESegment26, [nc, 32, 256, 512, True]] #
+  - [[16, 19, 22], 1, YOLOESegment26, [nc, 32, 256, 512, True]] # YOLOESegment26(P3, P4, P5)
{ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/cfg/models/26/yoloe-26.yaml
RENAMED

@@ -1,6 +1,6 @@
# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

-# Ultralytics
+# Ultralytics YOLOE-26 open-vocabulary object detection model with P3/8 - P5/32 outputs
# Model docs: https://docs.ultralytics.com/models/yolo26
# Task docs: https://docs.ultralytics.com/tasks/detect

@@ -9,15 +9,15 @@ nc: 80 # number of classes
end2end: True # whether to use end-to-end mode
reg_max: 1 # DFL bins
text_model: mobileclip2:b
-scales: # model compound scaling constants, i.e. 'model=
+scales: # model compound scaling constants, i.e. 'model=yoloe-26n.yaml' will call yoloe-26.yaml with scale 'n'
  # [depth, width, max_channels]
-  n: [0.50, 0.25, 1024] # summary:
-  s: [0.50, 0.50, 1024] # summary:
-  m: [0.50, 1.00, 512] # summary:
-  l: [1.00, 1.00, 512] # summary:
-  x: [1.00, 1.50, 512] # summary:
+  n: [0.50, 0.25, 1024] # summary: 298 layers, 5,061,540 parameters, 5,061,540 gradients, 7.3 GFLOPs
+  s: [0.50, 0.50, 1024] # summary: 298 layers, 13,776,836 parameters, 13,776,836 gradients, 24.8 GFLOPs
+  m: [0.50, 1.00, 512] # summary: 318 layers, 29,706,308 parameters, 29,706,308 gradients, 79.2 GFLOPs
+  l: [1.00, 1.00, 512] # summary: 430 layers, 34,109,764 parameters, 34,109,764 gradients, 97.6 GFLOPs
+  x: [1.00, 1.50, 512] # summary: 430 layers, 73,697,252 parameters, 73,697,252 gradients, 215.2 GFLOPs

-#
+# YOLOE26n backbone
backbone:
  # [from, repeats, module, args]
  - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2
@@ -32,7 +32,7 @@ backbone:
  - [-1, 1, SPPF, [1024, 5, 3, True]] # 9
  - [-1, 2, C2PSA, [1024]] # 10

-#
+# YOLOE26n head
head:
  - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
  - [[-1, 6], 1, Concat, [1]] # cat backbone P4
@@ -50,4 +50,4 @@ head:
  - [[-1, 10], 1, Concat, [1]] # cat head P5
  - [-1, 1, C3k2, [1024, True, 0.5, True]] # 22 (P5/32-large)

-  - [[16, 19, 22], 1, YOLOEDetect, [nc, 512, True]] #
+  - [[16, 19, 22], 1, YOLOEDetect, [nc, 512, True]] # YOLOEDetect(P3, P4, P5)
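The scale keys in these configs are selected by the model name at load time, as the scales comments describe. A small sketch, assuming the configs ship inside the installed ultralytics package:

```python
from ultralytics import YOLO

# "yolo26n-seg.yaml" resolves the base yolo26-seg.yaml config with scale 'n'
model = YOLO("yolo26n-seg.yaml")
model.info()  # prints a layer/parameter/GFLOPs summary like the comments above
```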
{ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/data/converter.py
RENAMED

@@ -749,12 +749,13 @@ def convert_to_multispectral(path: str | Path, n_channels: int = 10, replace: bo
async def convert_ndjson_to_yolo(ndjson_path: str | Path, output_path: str | Path | None = None) -> Path:
    """Convert NDJSON dataset format to Ultralytics YOLO11 dataset structure.

-    This function converts datasets stored in NDJSON (Newline Delimited JSON) format to the standard YOLO format
-    separate directories for images and labels.
-
+    This function converts datasets stored in NDJSON (Newline Delimited JSON) format to the standard YOLO format. For
+    detection/segmentation/pose/obb tasks, it creates separate directories for images and labels. For classification
+    tasks, it creates the ImageNet-style {split}/{class_name}/ folder structure. It supports parallel processing for
+    efficient conversion of large datasets and can download images from URLs.

    The NDJSON format consists of:
-    - First line: Dataset metadata with class names and configuration
+    - First line: Dataset metadata with class names, task type, and configuration
    - Subsequent lines: Individual image records with annotations and optional URLs

    Args:
@@ -763,7 +764,7 @@ async def convert_ndjson_to_yolo(ndjson_path: str | Path, output_path: str | Pat
            None, uses the parent directory of the NDJSON file. Defaults to None.

    Returns:
-        (Path): Path to the generated data.yaml file
+        (Path): Path to the generated data.yaml file (detection) or dataset directory (classification).

    Examples:
        Convert a local NDJSON file:
@@ -790,36 +791,51 @@ async def convert_ndjson_to_yolo(ndjson_path: str | Path, output_path: str | Pat
    dataset_dir = output_path / ndjson_path.stem
    splits = {record["split"] for record in image_records}

-    #
-
-
-    data_yaml["names"] = {int(k): v for k, v in dataset_record.get("class_names", {}).items()}
-    data_yaml.pop("class_names")
+    # Check if this is a classification dataset
+    is_classification = dataset_record.get("task") == "classify"
+    class_names = {int(k): v for k, v in dataset_record.get("class_names", {}).items()}

-
-
-
-
+    # Create base directories
+    dataset_dir.mkdir(parents=True, exist_ok=True)
+    data_yaml = None
+
+    if not is_classification:
+        # Detection/segmentation/pose/obb: prepare YAML and create base structure
+        data_yaml = dict(dataset_record)
+        data_yaml["names"] = class_names
+        data_yaml.pop("class_names", None)
+        data_yaml.pop("type", None)  # Remove NDJSON-specific fields
+        for split in sorted(splits):
+            (dataset_dir / "images" / split).mkdir(parents=True, exist_ok=True)
+            (dataset_dir / "labels" / split).mkdir(parents=True, exist_ok=True)
+            data_yaml[split] = f"images/{split}"

    async def process_record(session, semaphore, record):
        """Process single image record with async session."""
        async with semaphore:
            split, original_name = record["split"], record["file"]
-            label_path = dataset_dir / "labels" / split / f"{Path(original_name).stem}.txt"
-            image_path = dataset_dir / "images" / split / original_name
-
            annotations = record.get("annotations", {})
-            lines_to_write = []
-            for key in annotations.keys():
-                lines_to_write = [" ".join(map(str, item)) for item in annotations[key]]
-                break
-            if "classification" in annotations:
-                lines_to_write = [str(cls) for cls in annotations["classification"]]
-
-            label_path.write_text("\n".join(lines_to_write) + "\n" if lines_to_write else "")

+            if is_classification:
+                # Classification: place image in {split}/{class_name}/ folder
+                class_ids = annotations.get("classification", [])
+                class_id = class_ids[0] if class_ids else 0
+                class_name = class_names.get(class_id, str(class_id))
+                image_path = dataset_dir / split / class_name / original_name
+            else:
+                # Detection: write label file and place image in images/{split}/
+                image_path = dataset_dir / "images" / split / original_name
+                label_path = dataset_dir / "labels" / split / f"{Path(original_name).stem}.txt"
+                lines_to_write = []
+                for key in annotations.keys():
+                    lines_to_write = [" ".join(map(str, item)) for item in annotations[key]]
+                    break
+                label_path.write_text("\n".join(lines_to_write) + "\n" if lines_to_write else "")
+
+            # Download image if URL provided and file doesn't exist
            if http_url := record.get("url"):
                if not image_path.exists():
+                    image_path.parent.mkdir(parents=True, exist_ok=True)  # Ensure parent dir exists
                    try:
                        async with session.get(http_url, timeout=aiohttp.ClientTimeout(total=30)) as response:
                            response.raise_for_status()
@@ -848,8 +864,11 @@ async def convert_ndjson_to_yolo(ndjson_path: str | Path, output_path: str | Pat
        await asyncio.gather(*[tracked_process(record) for record in image_records])
        pbar.close()

-
-
-
-
-
+    if is_classification:
+        # Classification: return dataset directory (check_cls_dataset expects a directory path)
+        return dataset_dir
+    else:
+        # Detection: write data.yaml and return its path
+        yaml_path = dataset_dir / "data.yaml"
+        YAML.save(yaml_path, data_yaml)
+        return yaml_path
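A usage sketch for the converter shown above; the NDJSON filename is illustrative, and since the function is a coroutine it is driven with asyncio:

```python
import asyncio

from ultralytics.data.converter import convert_ndjson_to_yolo

# "dataset.ndjson" is an assumed local file; for a classification NDJSON the call
# returns the dataset directory, otherwise the path to the generated data.yaml.
out = asyncio.run(convert_ndjson_to_yolo("dataset.ndjson"))
print(out)
```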
{ultralytics_opencv_headless-8.4.0 → ultralytics_opencv_headless-8.4.2}/ultralytics/engine/exporter.py
RENAMED

@@ -463,6 +463,9 @@ class Exporter:
        )
        if tfjs and (ARM64 and LINUX):
            raise SystemError("TF.js exports are not currently supported on ARM64 Linux")
+        if ncnn and hasattr(model.model[-1], "one2one_cv2"):
+            del model.model[-1].one2one_cv2  # Disable end2end branch for NCNN export as it does not support topk
+            LOGGER.warning("NCNN export does not support end2end models, disabling end2end branch.")
        # Recommend OpenVINO if export and Intel CPU
        if SETTINGS.get("openvino_msg"):
            if is_intel():
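In practice this means exporting an end-to-end YOLO26 model to NCNN now drops the one-to-one branch and logs a warning instead of failing. A hedged usage sketch; the checkpoint and image names are assumptions, not taken from this diff:

```python
from ultralytics import YOLO

# Export to NCNN; for end2end models the exporter removes the top-k branch first
model = YOLO("yolo26n.pt")  # assumed checkpoint name
ncnn_path = model.export(format="ncnn")
results = YOLO(ncnn_path)("bus.jpg")  # run the NCNN-exported model on an assumed image
```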
@@ -791,7 +794,6 @@
                    f".*{head_module_name}/.*/Sub*",
                    f".*{head_module_name}/.*/Mul*",
                    f".*{head_module_name}/.*/Div*",
-                    f".*{head_module_name}\\.dfl.*",
                ],
                types=["Sigmoid"],
            )
@@ -864,10 +866,6 @@
    @try_export
    def export_ncnn(self, prefix=colorstr("NCNN:")):
        """Export YOLO model to NCNN format using PNNX https://github.com/pnnx/pnnx."""
-        if ARM64:
-            raise NotImplementedError(
-                "NCNN export is not supported on ARM64"
-            )  # https://github.com/Tencent/ncnn/issues/6509
        check_requirements("ncnn", cmds="--no-deps")  # no deps to avoid installing opencv-python
        check_requirements("pnnx")
        import ncnn