ultralytics 8.2.87__tar.gz → 8.2.89__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ultralytics might be problematic. Click here for more details.
- {ultralytics-8.2.87/ultralytics.egg-info → ultralytics-8.2.89}/PKG-INFO +15 -15
- {ultralytics-8.2.87 → ultralytics-8.2.89}/README.md +12 -12
- {ultralytics-8.2.87 → ultralytics-8.2.89}/pyproject.toml +2 -2
- {ultralytics-8.2.87 → ultralytics-8.2.89}/tests/test_python.py +2 -1
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/__init__.py +1 -1
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/Objects365.yaml +3 -3
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/data/converter.py +5 -8
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/engine/exporter.py +3 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/hub/google/__init__.py +2 -2
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/hub/session.py +1 -1
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/fastsam/predict.py +1 -1
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/sam/modules/blocks.py +3 -5
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/sam/modules/decoders.py +4 -5
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/sam/modules/encoders.py +2 -3
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/sam/modules/sam.py +14 -22
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/classify/predict.py +4 -4
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/classify/train.py +5 -1
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/nn/modules/activation.py +1 -2
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/nn/tasks.py +2 -5
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/solutions/distance_calculation.py +19 -56
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/solutions/object_counter.py +24 -36
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/__init__.py +3 -3
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/checks.py +6 -7
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/plotting.py +19 -19
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/torch_utils.py +2 -2
- {ultralytics-8.2.87 → ultralytics-8.2.89/ultralytics.egg-info}/PKG-INFO +15 -15
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics.egg-info/requires.txt +2 -2
- {ultralytics-8.2.87 → ultralytics-8.2.89}/LICENSE +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/setup.cfg +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/tests/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/tests/conftest.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/tests/test_cli.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/tests/test_cuda.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/tests/test_engine.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/tests/test_explorer.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/tests/test_exports.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/tests/test_integrations.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/tests/test_solutions.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/assets/bus.jpg +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/assets/zidane.jpg +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/Argoverse.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/DOTAv1.5.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/DOTAv1.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/GlobalWheat2020.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/ImageNet.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/SKU-110K.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/VOC.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/VisDrone.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/african-wildlife.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/brain-tumor.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/carparts-seg.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/coco-pose.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/coco.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/coco128-seg.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/coco128.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/coco8-pose.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/coco8-seg.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/coco8.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/crack-seg.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/dota8.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/lvis.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/open-images-v7.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/package-seg.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/signature.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/tiger-pose.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/datasets/xView.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/default.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/rt-detr/rtdetr-l.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/rt-detr/rtdetr-resnet101.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/rt-detr/rtdetr-resnet50.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/rt-detr/rtdetr-x.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v10/yolov10b.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v10/yolov10l.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v10/yolov10m.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v10/yolov10n.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v10/yolov10s.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v10/yolov10x.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v3/yolov3-spp.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v3/yolov3-tiny.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v3/yolov3.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v5/yolov5-p6.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v5/yolov5.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v6/yolov6.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8-cls-resnet101.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8-cls-resnet50.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8-cls.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8-ghost-p2.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8-ghost-p6.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8-ghost.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8-obb.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8-p2.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8-p6.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8-pose-p6.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8-pose.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8-rtdetr.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8-seg-p6.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8-seg.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8-world.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8-worldv2.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v8/yolov8.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v9/yolov9c-seg.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v9/yolov9c.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v9/yolov9e-seg.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v9/yolov9e.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v9/yolov9m.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v9/yolov9s.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/models/v9/yolov9t.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/trackers/botsort.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/cfg/trackers/bytetrack.yaml +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/data/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/data/annotator.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/data/augment.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/data/base.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/data/build.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/data/dataset.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/data/explorer/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/data/explorer/explorer.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/data/explorer/gui/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/data/explorer/gui/dash.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/data/explorer/utils.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/data/loaders.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/data/split_dota.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/data/utils.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/engine/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/engine/model.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/engine/predictor.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/engine/results.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/engine/trainer.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/engine/tuner.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/engine/validator.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/hub/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/hub/auth.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/hub/utils.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/fastsam/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/fastsam/model.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/fastsam/utils.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/fastsam/val.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/nas/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/nas/model.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/nas/predict.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/nas/val.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/rtdetr/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/rtdetr/model.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/rtdetr/predict.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/rtdetr/train.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/rtdetr/val.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/sam/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/sam/amg.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/sam/build.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/sam/model.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/sam/modules/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/sam/modules/memory_attention.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/sam/modules/tiny_encoder.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/sam/modules/transformer.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/sam/modules/utils.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/sam/predict.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/utils/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/utils/loss.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/utils/ops.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/classify/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/classify/val.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/detect/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/detect/predict.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/detect/train.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/detect/val.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/model.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/obb/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/obb/predict.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/obb/train.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/obb/val.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/pose/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/pose/predict.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/pose/train.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/pose/val.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/segment/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/segment/predict.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/segment/train.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/segment/val.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/world/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/world/train.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/models/yolo/world/train_world.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/nn/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/nn/autobackend.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/nn/modules/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/nn/modules/block.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/nn/modules/conv.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/nn/modules/head.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/nn/modules/transformer.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/nn/modules/utils.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/solutions/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/solutions/ai_gym.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/solutions/analytics.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/solutions/heatmap.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/solutions/parking_management.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/solutions/queue_management.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/solutions/speed_estimation.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/solutions/streamlit_inference.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/trackers/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/trackers/basetrack.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/trackers/bot_sort.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/trackers/byte_tracker.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/trackers/track.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/trackers/utils/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/trackers/utils/gmc.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/trackers/utils/kalman_filter.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/trackers/utils/matching.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/autobatch.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/benchmarks.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/callbacks/__init__.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/callbacks/base.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/callbacks/clearml.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/callbacks/comet.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/callbacks/dvc.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/callbacks/hub.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/callbacks/mlflow.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/callbacks/neptune.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/callbacks/raytune.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/callbacks/tensorboard.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/callbacks/wb.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/dist.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/downloads.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/errors.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/files.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/instance.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/loss.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/metrics.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/ops.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/patches.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/tal.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/triton.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics/utils/tuner.py +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics.egg-info/SOURCES.txt +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics.egg-info/dependency_links.txt +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics.egg-info/entry_points.txt +0 -0
- {ultralytics-8.2.87 → ultralytics-8.2.89}/ultralytics.egg-info/top_level.txt +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.1
|
|
2
2
|
Name: ultralytics
|
|
3
|
-
Version: 8.2.87
|
|
3
|
+
Version: 8.2.89
|
|
4
4
|
Summary: Ultralytics YOLOv8 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
|
|
5
5
|
Author: Glenn Jocher, Ayush Chaurasia, Jing Qiu
|
|
6
6
|
Maintainer: Glenn Jocher, Ayush Chaurasia, Jing Qiu
|
|
@@ -37,8 +37,8 @@ Requires-Dist: pillow>=7.1.2
|
|
|
37
37
|
Requires-Dist: pyyaml>=5.3.1
|
|
38
38
|
Requires-Dist: requests>=2.23.0
|
|
39
39
|
Requires-Dist: scipy>=1.4.1
|
|
40
|
-
Requires-Dist: torch<2.4.0,>=1.8.0; sys_platform == "win32"
|
|
41
40
|
Requires-Dist: torch>=1.8.0
|
|
41
|
+
Requires-Dist: torch!=2.4.0,>=1.8.0; sys_platform == "win32"
|
|
42
42
|
Requires-Dist: torchvision>=0.9.0
|
|
43
43
|
Requires-Dist: tqdm>=4.64.0
|
|
44
44
|
Requires-Dist: psutil
|
|
@@ -56,7 +56,7 @@ Requires-Dist: mkdocs-material>=9.5.9; extra == "dev"
|
|
|
56
56
|
Requires-Dist: mkdocstrings[python]; extra == "dev"
|
|
57
57
|
Requires-Dist: mkdocs-jupyter; extra == "dev"
|
|
58
58
|
Requires-Dist: mkdocs-redirects; extra == "dev"
|
|
59
|
-
Requires-Dist: mkdocs-ultralytics-plugin>=0.1.
|
|
59
|
+
Requires-Dist: mkdocs-ultralytics-plugin>=0.1.8; extra == "dev"
|
|
60
60
|
Requires-Dist: mkdocs-macros-plugin>=1.0.5; extra == "dev"
|
|
61
61
|
Provides-Extra: export
|
|
62
62
|
Requires-Dist: onnx>=1.12.0; extra == "export"
|
|
@@ -89,7 +89,7 @@ Requires-Dist: pycocotools>=2.0.7; extra == "extra"
|
|
|
89
89
|
<img width="100%" src="https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/banner-yolov8.png" alt="YOLO Vision banner"></a>
|
|
90
90
|
</p>
|
|
91
91
|
|
|
92
|
-
[中文](https://docs.ultralytics.com/zh
|
|
92
|
+
[中文](https://docs.ultralytics.com/zh) | [한국어](https://docs.ultralytics.com/ko) | [日本語](https://docs.ultralytics.com/ja) | [Русский](https://docs.ultralytics.com/ru) | [Deutsch](https://docs.ultralytics.com/de) | [Français](https://docs.ultralytics.com/fr) | [Español](https://docs.ultralytics.com/es) | [Português](https://docs.ultralytics.com/pt) | [Türkçe](https://docs.ultralytics.com/tr) | [Tiếng Việt](https://docs.ultralytics.com/vi) | [العربية](https://docs.ultralytics.com/ar) <br>
|
|
93
93
|
|
|
94
94
|
<div>
|
|
95
95
|
<a href="https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml"><img src="https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml/badge.svg" alt="Ultralytics CI"></a>
|
|
@@ -105,11 +105,11 @@ Requires-Dist: pycocotools>=2.0.7; extra == "extra"
|
|
|
105
105
|
</div>
|
|
106
106
|
<br>
|
|
107
107
|
|
|
108
|
-
[Ultralytics](https://ultralytics.com) [YOLOv8](https://github.com/ultralytics/ultralytics) is a cutting-edge, state-of-the-art (SOTA) model that builds upon the success of previous YOLO versions and introduces new features and improvements to further boost performance and flexibility. YOLOv8 is designed to be fast, accurate, and easy to use, making it an excellent choice for a wide range of object detection and tracking, instance segmentation, image classification and pose estimation tasks.
|
|
108
|
+
[Ultralytics](https://www.ultralytics.com/) [YOLOv8](https://github.com/ultralytics/ultralytics) is a cutting-edge, state-of-the-art (SOTA) model that builds upon the success of previous YOLO versions and introduces new features and improvements to further boost performance and flexibility. YOLOv8 is designed to be fast, accurate, and easy to use, making it an excellent choice for a wide range of object detection and tracking, instance segmentation, image classification and pose estimation tasks.
|
|
109
109
|
|
|
110
110
|
We hope that the resources here will help you get the most out of YOLOv8. Please browse the YOLOv8 <a href="https://docs.ultralytics.com/">Docs</a> for details, raise an issue on <a href="https://github.com/ultralytics/ultralytics/issues/new/choose">GitHub</a> for support, questions, or discussions, become a member of the Ultralytics <a href="https://ultralytics.com/discord">Discord</a>, <a href="https://reddit.com/r/ultralytics">Reddit</a> and <a href="https://community.ultralytics.com">Forums</a>!
|
|
111
111
|
|
|
112
|
-
To request an Enterprise License please complete the form at [Ultralytics Licensing](https://ultralytics.com/license).
|
|
112
|
+
To request an Enterprise License please complete the form at [Ultralytics Licensing](https://www.ultralytics.com/license).
|
|
113
113
|
|
|
114
114
|
<img width="100%" src="https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/yolo-comparison-plots.png" alt="YOLOv8 performance plots"></a>
|
|
115
115
|
|
|
@@ -188,7 +188,7 @@ See YOLOv8 [Python Docs](https://docs.ultralytics.com/usage/python) for more exa
|
|
|
188
188
|
|
|
189
189
|
### Notebooks
|
|
190
190
|
|
|
191
|
-
Ultralytics provides interactive notebooks for YOLOv8, covering training, validation, tracking, and more. Each notebook is paired with a [YouTube](https://youtube.com/ultralytics?sub_confirmation=1) tutorial, making it easy to learn and implement advanced YOLOv8 features.
|
|
191
|
+
Ultralytics provides interactive notebooks for YOLOv8, covering training, validation, tracking, and more. Each notebook is paired with a [YouTube](https://www.youtube.com/ultralytics?sub_confirmation=1) tutorial, making it easy to learn and implement advanced YOLOv8 features.
|
|
192
192
|
|
|
193
193
|
| Docs | Notebook | YouTube |
|
|
194
194
|
| ---------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: |
|
|
@@ -219,7 +219,7 @@ See [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examp
|
|
|
219
219
|
| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 |
|
|
220
220
|
| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 |
|
|
221
221
|
|
|
222
|
-
- **mAP<sup>val</sup>** values are for single-model single-scale on [COCO val2017](https://cocodataset.org) dataset. <br>Reproduce by `yolo val detect data=coco.yaml device=0`
|
|
222
|
+
- **mAP<sup>val</sup>** values are for single-model single-scale on [COCO val2017](https://cocodataset.org/) dataset. <br>Reproduce by `yolo val detect data=coco.yaml device=0`
|
|
223
223
|
- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val detect data=coco.yaml batch=1 device=0|cpu`
|
|
224
224
|
|
|
225
225
|
</details>
|
|
@@ -253,7 +253,7 @@ See [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage e
|
|
|
253
253
|
| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 |
|
|
254
254
|
| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 |
|
|
255
255
|
|
|
256
|
-
- **mAP<sup>val</sup>** values are for single-model single-scale on [COCO val2017](https://cocodataset.org) dataset. <br>Reproduce by `yolo val segment data=coco-seg.yaml device=0`
|
|
256
|
+
- **mAP<sup>val</sup>** values are for single-model single-scale on [COCO val2017](https://cocodataset.org/) dataset. <br>Reproduce by `yolo val segment data=coco-seg.yaml device=0`
|
|
257
257
|
- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val segment data=coco-seg.yaml batch=1 device=0|cpu`
|
|
258
258
|
|
|
259
259
|
</details>
|
|
@@ -271,7 +271,7 @@ See [Pose Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples wit
|
|
|
271
271
|
| [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 |
|
|
272
272
|
| [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 |
|
|
273
273
|
|
|
274
|
-
- **mAP<sup>val</sup>** values are for single-model single-scale on [COCO Keypoints val2017](https://cocodataset.org) dataset. <br>Reproduce by `yolo val pose data=coco-pose.yaml device=0`
|
|
274
|
+
- **mAP<sup>val</sup>** values are for single-model single-scale on [COCO Keypoints val2017](https://cocodataset.org/) dataset. <br>Reproduce by `yolo val pose data=coco-pose.yaml device=0`
|
|
275
275
|
- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu`
|
|
276
276
|
|
|
277
277
|
</details>
|
|
@@ -340,14 +340,14 @@ Our key integrations with leading AI platforms extend the functionality of Ultra
|
|
|
340
340
|
|
|
341
341
|
## <div align="center">Ultralytics HUB</div>
|
|
342
342
|
|
|
343
|
-
Experience seamless AI with [Ultralytics HUB](https://ultralytics.com/hub) ⭐, the all-in-one solution for data visualization, YOLOv5 and YOLOv8 🚀 model training and deployment, without any coding. Transform images into actionable insights and bring your AI visions to life with ease using our cutting-edge platform and user-friendly [Ultralytics App](https://ultralytics.com/
|
|
343
|
+
Experience seamless AI with [Ultralytics HUB](https://www.ultralytics.com/hub) ⭐, the all-in-one solution for data visualization, YOLOv5 and YOLOv8 🚀 model training and deployment, without any coding. Transform images into actionable insights and bring your AI visions to life with ease using our cutting-edge platform and user-friendly [Ultralytics App](https://www.ultralytics.com/app-install). Start your journey for **Free** now!
|
|
344
344
|
|
|
345
345
|
<a href="https://ultralytics.com/hub" target="_blank">
|
|
346
346
|
<img width="100%" src="https://github.com/ultralytics/assets/raw/main/im/ultralytics-hub.png" alt="Ultralytics HUB preview image"></a>
|
|
347
347
|
|
|
348
348
|
## <div align="center">Contribute</div>
|
|
349
349
|
|
|
350
|
-
We love your input! YOLOv5 and YOLOv8 would not be possible without help from our community. Please see our [Contributing Guide](https://docs.ultralytics.com/help/contributing) to get started, and fill out our [Survey](https://ultralytics.com/survey?utm_source=github&utm_medium=social&utm_campaign=Survey) to send us feedback on your experience. Thank you 🙏 to all our contributors!
|
|
350
|
+
We love your input! YOLOv5 and YOLOv8 would not be possible without help from our community. Please see our [Contributing Guide](https://docs.ultralytics.com/help/contributing) to get started, and fill out our [Survey](https://www.ultralytics.com/survey?utm_source=github&utm_medium=social&utm_campaign=Survey) to send us feedback on your experience. Thank you 🙏 to all our contributors!
|
|
351
351
|
|
|
352
352
|
<!-- SVG image from https://opencollective.com/ultralytics/contributors.svg?width=990 -->
|
|
353
353
|
|
|
@@ -358,12 +358,12 @@ We love your input! YOLOv5 and YOLOv8 would not be possible without help from ou
|
|
|
358
358
|
|
|
359
359
|
Ultralytics offers two licensing options to accommodate diverse use cases:
|
|
360
360
|
|
|
361
|
-
- **AGPL-3.0 License**: This [OSI-approved](https://opensource.org/
|
|
362
|
-
- **Enterprise License**: Designed for commercial use, this license permits seamless integration of Ultralytics software and AI models into commercial goods and services, bypassing the open-source requirements of AGPL-3.0. If your scenario involves embedding our solutions into a commercial offering, reach out through [Ultralytics Licensing](https://ultralytics.com/license).
|
|
361
|
+
- **AGPL-3.0 License**: This [OSI-approved](https://opensource.org/license) open-source license is ideal for students and enthusiasts, promoting open collaboration and knowledge sharing. See the [LICENSE](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) file for more details.
|
|
362
|
+
- **Enterprise License**: Designed for commercial use, this license permits seamless integration of Ultralytics software and AI models into commercial goods and services, bypassing the open-source requirements of AGPL-3.0. If your scenario involves embedding our solutions into a commercial offering, reach out through [Ultralytics Licensing](https://www.ultralytics.com/license).
|
|
363
363
|
|
|
364
364
|
## <div align="center">Contact</div>
|
|
365
365
|
|
|
366
|
-
For Ultralytics bug reports and feature requests please visit [GitHub Issues](https://github.com/ultralytics/ultralytics/issues). Become a member of the Ultralytics [Discord](https://
|
|
366
|
+
For Ultralytics bug reports and feature requests please visit [GitHub Issues](https://github.com/ultralytics/ultralytics/issues). Become a member of the Ultralytics [Discord](https://discord.com/invite/ultralytics), [Reddit](https://www.reddit.com/r/ultralytics/), or [Forums](https://community.ultralytics.com/) for asking questions, sharing projects, learning discussions, or for help with all things Ultralytics!
|
|
367
367
|
|
|
368
368
|
<br>
|
|
369
369
|
<div align="center">
|
|
@@ -4,7 +4,7 @@
|
|
|
4
4
|
<img width="100%" src="https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/banner-yolov8.png" alt="YOLO Vision banner"></a>
|
|
5
5
|
</p>
|
|
6
6
|
|
|
7
|
-
[中文](https://docs.ultralytics.com/zh
|
|
7
|
+
[中文](https://docs.ultralytics.com/zh) | [한국어](https://docs.ultralytics.com/ko) | [日本語](https://docs.ultralytics.com/ja) | [Русский](https://docs.ultralytics.com/ru) | [Deutsch](https://docs.ultralytics.com/de) | [Français](https://docs.ultralytics.com/fr) | [Español](https://docs.ultralytics.com/es) | [Português](https://docs.ultralytics.com/pt) | [Türkçe](https://docs.ultralytics.com/tr) | [Tiếng Việt](https://docs.ultralytics.com/vi) | [العربية](https://docs.ultralytics.com/ar) <br>
|
|
8
8
|
|
|
9
9
|
<div>
|
|
10
10
|
<a href="https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml"><img src="https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml/badge.svg" alt="Ultralytics CI"></a>
|
|
@@ -20,11 +20,11 @@
|
|
|
20
20
|
</div>
|
|
21
21
|
<br>
|
|
22
22
|
|
|
23
|
-
[Ultralytics](https://ultralytics.com) [YOLOv8](https://github.com/ultralytics/ultralytics) is a cutting-edge, state-of-the-art (SOTA) model that builds upon the success of previous YOLO versions and introduces new features and improvements to further boost performance and flexibility. YOLOv8 is designed to be fast, accurate, and easy to use, making it an excellent choice for a wide range of object detection and tracking, instance segmentation, image classification and pose estimation tasks.
|
|
23
|
+
[Ultralytics](https://www.ultralytics.com/) [YOLOv8](https://github.com/ultralytics/ultralytics) is a cutting-edge, state-of-the-art (SOTA) model that builds upon the success of previous YOLO versions and introduces new features and improvements to further boost performance and flexibility. YOLOv8 is designed to be fast, accurate, and easy to use, making it an excellent choice for a wide range of object detection and tracking, instance segmentation, image classification and pose estimation tasks.
|
|
24
24
|
|
|
25
25
|
We hope that the resources here will help you get the most out of YOLOv8. Please browse the YOLOv8 <a href="https://docs.ultralytics.com/">Docs</a> for details, raise an issue on <a href="https://github.com/ultralytics/ultralytics/issues/new/choose">GitHub</a> for support, questions, or discussions, become a member of the Ultralytics <a href="https://ultralytics.com/discord">Discord</a>, <a href="https://reddit.com/r/ultralytics">Reddit</a> and <a href="https://community.ultralytics.com">Forums</a>!
|
|
26
26
|
|
|
27
|
-
To request an Enterprise License please complete the form at [Ultralytics Licensing](https://ultralytics.com/license).
|
|
27
|
+
To request an Enterprise License please complete the form at [Ultralytics Licensing](https://www.ultralytics.com/license).
|
|
28
28
|
|
|
29
29
|
<img width="100%" src="https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/yolo-comparison-plots.png" alt="YOLOv8 performance plots"></a>
|
|
30
30
|
|
|
@@ -103,7 +103,7 @@ See YOLOv8 [Python Docs](https://docs.ultralytics.com/usage/python) for more exa
|
|
|
103
103
|
|
|
104
104
|
### Notebooks
|
|
105
105
|
|
|
106
|
-
Ultralytics provides interactive notebooks for YOLOv8, covering training, validation, tracking, and more. Each notebook is paired with a [YouTube](https://youtube.com/ultralytics?sub_confirmation=1) tutorial, making it easy to learn and implement advanced YOLOv8 features.
|
|
106
|
+
Ultralytics provides interactive notebooks for YOLOv8, covering training, validation, tracking, and more. Each notebook is paired with a [YouTube](https://www.youtube.com/ultralytics?sub_confirmation=1) tutorial, making it easy to learn and implement advanced YOLOv8 features.
|
|
107
107
|
|
|
108
108
|
| Docs | Notebook | YouTube |
|
|
109
109
|
| ---------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: |
|
|
@@ -134,7 +134,7 @@ See [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examp
|
|
|
134
134
|
| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 |
|
|
135
135
|
| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 |
|
|
136
136
|
|
|
137
|
-
- **mAP<sup>val</sup>** values are for single-model single-scale on [COCO val2017](https://cocodataset.org) dataset. <br>Reproduce by `yolo val detect data=coco.yaml device=0`
|
|
137
|
+
- **mAP<sup>val</sup>** values are for single-model single-scale on [COCO val2017](https://cocodataset.org/) dataset. <br>Reproduce by `yolo val detect data=coco.yaml device=0`
|
|
138
138
|
- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val detect data=coco.yaml batch=1 device=0|cpu`
|
|
139
139
|
|
|
140
140
|
</details>
|
|
@@ -168,7 +168,7 @@ See [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage e
|
|
|
168
168
|
| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 |
|
|
169
169
|
| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 |
|
|
170
170
|
|
|
171
|
-
- **mAP<sup>val</sup>** values are for single-model single-scale on [COCO val2017](https://cocodataset.org) dataset. <br>Reproduce by `yolo val segment data=coco-seg.yaml device=0`
|
|
171
|
+
- **mAP<sup>val</sup>** values are for single-model single-scale on [COCO val2017](https://cocodataset.org/) dataset. <br>Reproduce by `yolo val segment data=coco-seg.yaml device=0`
|
|
172
172
|
- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val segment data=coco-seg.yaml batch=1 device=0|cpu`
|
|
173
173
|
|
|
174
174
|
</details>
|
|
@@ -186,7 +186,7 @@ See [Pose Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples wit
|
|
|
186
186
|
| [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 |
|
|
187
187
|
| [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 |
|
|
188
188
|
|
|
189
|
-
- **mAP<sup>val</sup>** values are for single-model single-scale on [COCO Keypoints val2017](https://cocodataset.org) dataset. <br>Reproduce by `yolo val pose data=coco-pose.yaml device=0`
|
|
189
|
+
- **mAP<sup>val</sup>** values are for single-model single-scale on [COCO Keypoints val2017](https://cocodataset.org/) dataset. <br>Reproduce by `yolo val pose data=coco-pose.yaml device=0`
|
|
190
190
|
- **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu`
|
|
191
191
|
|
|
192
192
|
</details>
|
|
@@ -255,14 +255,14 @@ Our key integrations with leading AI platforms extend the functionality of Ultra
|
|
|
255
255
|
|
|
256
256
|
## <div align="center">Ultralytics HUB</div>
|
|
257
257
|
|
|
258
|
-
Experience seamless AI with [Ultralytics HUB](https://ultralytics.com/hub) ⭐, the all-in-one solution for data visualization, YOLOv5 and YOLOv8 🚀 model training and deployment, without any coding. Transform images into actionable insights and bring your AI visions to life with ease using our cutting-edge platform and user-friendly [Ultralytics App](https://ultralytics.com/
|
|
258
|
+
Experience seamless AI with [Ultralytics HUB](https://www.ultralytics.com/hub) ⭐, the all-in-one solution for data visualization, YOLOv5 and YOLOv8 🚀 model training and deployment, without any coding. Transform images into actionable insights and bring your AI visions to life with ease using our cutting-edge platform and user-friendly [Ultralytics App](https://www.ultralytics.com/app-install). Start your journey for **Free** now!
|
|
259
259
|
|
|
260
260
|
<a href="https://ultralytics.com/hub" target="_blank">
|
|
261
261
|
<img width="100%" src="https://github.com/ultralytics/assets/raw/main/im/ultralytics-hub.png" alt="Ultralytics HUB preview image"></a>
|
|
262
262
|
|
|
263
263
|
## <div align="center">Contribute</div>
|
|
264
264
|
|
|
265
|
-
We love your input! YOLOv5 and YOLOv8 would not be possible without help from our community. Please see our [Contributing Guide](https://docs.ultralytics.com/help/contributing) to get started, and fill out our [Survey](https://ultralytics.com/survey?utm_source=github&utm_medium=social&utm_campaign=Survey) to send us feedback on your experience. Thank you 🙏 to all our contributors!
|
|
265
|
+
We love your input! YOLOv5 and YOLOv8 would not be possible without help from our community. Please see our [Contributing Guide](https://docs.ultralytics.com/help/contributing) to get started, and fill out our [Survey](https://www.ultralytics.com/survey?utm_source=github&utm_medium=social&utm_campaign=Survey) to send us feedback on your experience. Thank you 🙏 to all our contributors!
|
|
266
266
|
|
|
267
267
|
<!-- SVG image from https://opencollective.com/ultralytics/contributors.svg?width=990 -->
|
|
268
268
|
|
|
@@ -273,12 +273,12 @@ We love your input! YOLOv5 and YOLOv8 would not be possible without help from ou
|
|
|
273
273
|
|
|
274
274
|
Ultralytics offers two licensing options to accommodate diverse use cases:
|
|
275
275
|
|
|
276
|
-
- **AGPL-3.0 License**: This [OSI-approved](https://opensource.org/
|
|
277
|
-
- **Enterprise License**: Designed for commercial use, this license permits seamless integration of Ultralytics software and AI models into commercial goods and services, bypassing the open-source requirements of AGPL-3.0. If your scenario involves embedding our solutions into a commercial offering, reach out through [Ultralytics Licensing](https://ultralytics.com/license).
|
|
276
|
+
- **AGPL-3.0 License**: This [OSI-approved](https://opensource.org/license) open-source license is ideal for students and enthusiasts, promoting open collaboration and knowledge sharing. See the [LICENSE](https://github.com/ultralytics/ultralytics/blob/main/LICENSE) file for more details.
|
|
277
|
+
- **Enterprise License**: Designed for commercial use, this license permits seamless integration of Ultralytics software and AI models into commercial goods and services, bypassing the open-source requirements of AGPL-3.0. If your scenario involves embedding our solutions into a commercial offering, reach out through [Ultralytics Licensing](https://www.ultralytics.com/license).
|
|
278
278
|
|
|
279
279
|
## <div align="center">Contact</div>
|
|
280
280
|
|
|
281
|
-
For Ultralytics bug reports and feature requests please visit [GitHub Issues](https://github.com/ultralytics/ultralytics/issues). Become a member of the Ultralytics [Discord](https://
|
|
281
|
+
For Ultralytics bug reports and feature requests please visit [GitHub Issues](https://github.com/ultralytics/ultralytics/issues). Become a member of the Ultralytics [Discord](https://discord.com/invite/ultralytics), [Reddit](https://www.reddit.com/r/ultralytics/), or [Forums](https://community.ultralytics.com/) for asking questions, sharing projects, learning discussions, or for help with all things Ultralytics!
|
|
282
282
|
|
|
283
283
|
<br>
|
|
284
284
|
<div align="center">
|
|
@@ -71,8 +71,8 @@ dependencies = [
|
|
|
71
71
|
"pyyaml>=5.3.1",
|
|
72
72
|
"requests>=2.23.0",
|
|
73
73
|
"scipy>=1.4.1",
|
|
74
|
-
"torch>=1.8.0,<2.4.0; sys_platform == 'win32'", # Windows CPU errors https://github.com/ultralytics/ultralytics/issues/15049
|
|
75
74
|
"torch>=1.8.0",
|
|
75
|
+
"torch>=1.8.0,!=2.4.0; sys_platform == 'win32'", # Windows CPU errors w/ 2.4.0 https://github.com/ultralytics/ultralytics/issues/15049
|
|
76
76
|
"torchvision>=0.9.0",
|
|
77
77
|
"tqdm>=4.64.0", # progress bars
|
|
78
78
|
"psutil", # system utilization
|
|
@@ -94,7 +94,7 @@ dev = [
|
|
|
94
94
|
"mkdocstrings[python]",
|
|
95
95
|
"mkdocs-jupyter", # notebooks
|
|
96
96
|
"mkdocs-redirects", # 301 redirects
|
|
97
|
-
"mkdocs-ultralytics-plugin>=0.1.
|
|
97
|
+
"mkdocs-ultralytics-plugin>=0.1.8", # for meta descriptions and images, dates and authors
|
|
98
98
|
"mkdocs-macros-plugin>=1.0.5" # duplicating content (i.e. export tables) in multiple places
|
|
99
99
|
]
|
|
100
100
|
export = [
|
|
@@ -196,13 +196,14 @@ def test_all_model_yamls():
|
|
|
196
196
|
YOLO(m.name)
|
|
197
197
|
|
|
198
198
|
|
|
199
|
+
@pytest.mark.skipif(WINDOWS, reason="Windows slow CI export bug https://github.com/ultralytics/ultralytics/pull/16003")
|
|
199
200
|
def test_workflow():
|
|
200
201
|
"""Test the complete workflow including training, validation, prediction, and exporting."""
|
|
201
202
|
model = YOLO(MODEL)
|
|
202
203
|
model.train(data="coco8.yaml", epochs=1, imgsz=32, optimizer="SGD")
|
|
203
204
|
model.val(imgsz=32)
|
|
204
205
|
model.predict(SOURCE, imgsz=32)
|
|
205
|
-
model.export(format="torchscript")
|
|
206
|
+
model.export(format="torchscript") # WARNING: Windows slow CI export bug
|
|
206
207
|
|
|
207
208
|
|
|
208
209
|
def test_predict_callback_and_setup():
|
|
@@ -113,7 +113,7 @@ names:
|
|
|
113
113
|
95: Pot
|
|
114
114
|
96: Cow
|
|
115
115
|
97: Cake
|
|
116
|
-
98:
|
|
116
|
+
98: Dining Table
|
|
117
117
|
99: Sheep
|
|
118
118
|
100: Hanger
|
|
119
119
|
101: Blackboard/Whiteboard
|
|
@@ -304,7 +304,7 @@ names:
|
|
|
304
304
|
286: Hammer
|
|
305
305
|
287: Cue
|
|
306
306
|
288: Avocado
|
|
307
|
-
289:
|
|
307
|
+
289: Hami melon
|
|
308
308
|
290: Flask
|
|
309
309
|
291: Mushroom
|
|
310
310
|
292: Screwdriver
|
|
@@ -328,7 +328,7 @@ names:
|
|
|
328
328
|
310: Dishwasher
|
|
329
329
|
311: Crab
|
|
330
330
|
312: Hoverboard
|
|
331
|
-
313:
|
|
331
|
+
313: Meatball
|
|
332
332
|
314: Rice Cooker
|
|
333
333
|
315: Tuba
|
|
334
334
|
316: Calculator
|
|
@@ -370,13 +370,10 @@ def convert_segment_masks_to_yolo_seg(masks_dir, output_dir, classes):
|
|
|
370
370
|
├─ mask_yolo_03.txt
|
|
371
371
|
└─ mask_yolo_04.txt
|
|
372
372
|
"""
|
|
373
|
-
import os
|
|
374
|
-
|
|
375
373
|
pixel_to_class_mapping = {i + 1: i for i in range(classes)}
|
|
376
|
-
for
|
|
377
|
-
if
|
|
378
|
-
|
|
379
|
-
mask = cv2.imread(mask_path, cv2.IMREAD_GRAYSCALE) # Read the mask image in grayscale
|
|
374
|
+
for mask_path in Path(masks_dir).iterdir():
|
|
375
|
+
if mask_path.suffix == ".png":
|
|
376
|
+
mask = cv2.imread(str(mask_path), cv2.IMREAD_GRAYSCALE) # Read the mask image in grayscale
|
|
380
377
|
img_height, img_width = mask.shape # Get image dimensions
|
|
381
378
|
LOGGER.info(f"Processing {mask_path} imgsz = {img_height} x {img_width}")
|
|
382
379
|
|
|
@@ -388,7 +385,7 @@ def convert_segment_masks_to_yolo_seg(masks_dir, output_dir, classes):
|
|
|
388
385
|
continue # Skip background
|
|
389
386
|
class_index = pixel_to_class_mapping.get(value, -1)
|
|
390
387
|
if class_index == -1:
|
|
391
|
-
LOGGER.warning(f"Unknown class for pixel value {value} in file {
|
|
388
|
+
LOGGER.warning(f"Unknown class for pixel value {value} in file {mask_path}, skipping.")
|
|
392
389
|
continue
|
|
393
390
|
|
|
394
391
|
# Create a binary mask for the current class and find contours
|
|
@@ -406,7 +403,7 @@ def convert_segment_masks_to_yolo_seg(masks_dir, output_dir, classes):
|
|
|
406
403
|
yolo_format.append(round(point[1] / img_height, 6))
|
|
407
404
|
yolo_format_data.append(yolo_format)
|
|
408
405
|
# Save Ultralytics YOLO format data to file
|
|
409
|
-
output_path =
|
|
406
|
+
output_path = Path(output_dir) / f"{mask_path.stem}.txt"
|
|
410
407
|
with open(output_path, "w") as file:
|
|
411
408
|
for item in yolo_format_data:
|
|
412
409
|
line = " ".join(map(str, item))
|
|
@@ -610,6 +610,9 @@ class Exporter:
|
|
|
610
610
|
f = self.file.with_suffix(".mlmodel" if mlmodel else ".mlpackage")
|
|
611
611
|
if f.is_dir():
|
|
612
612
|
shutil.rmtree(f)
|
|
613
|
+
if self.args.nms and getattr(self.model, "end2end", False):
|
|
614
|
+
LOGGER.warning(f"{prefix} WARNING ⚠️ 'nms=True' is not available for end2end models. Forcing 'nms=False'.")
|
|
615
|
+
self.args.nms = False
|
|
613
616
|
|
|
614
617
|
bias = [0.0, 0.0, 0.0]
|
|
615
618
|
scale = 1 / 255
|
|
@@ -136,12 +136,12 @@ class GCPRegions:
|
|
|
136
136
|
sorted_results = sorted(results, key=lambda x: x[1])
|
|
137
137
|
|
|
138
138
|
if verbose:
|
|
139
|
-
print(f"{'Region':<25} {'Location':<35} {'Tier':<5}
|
|
139
|
+
print(f"{'Region':<25} {'Location':<35} {'Tier':<5} Latency (ms)")
|
|
140
140
|
for region, mean, std, min_, max_ in sorted_results:
|
|
141
141
|
tier, city, country = self.regions[region]
|
|
142
142
|
location = f"{city}, {country}"
|
|
143
143
|
if mean == float("inf"):
|
|
144
|
-
print(f"{region:<25} {location:<35} {tier:<5}
|
|
144
|
+
print(f"{region:<25} {location:<35} {tier:<5} Timeout")
|
|
145
145
|
else:
|
|
146
146
|
print(f"{region:<25} {location:<35} {tier:<5} {mean:.0f} ± {std:.0f} ({min_:.0f} - {max_:.0f})")
|
|
147
147
|
print(f"\nLowest latency region{'s' if top > 1 else ''}:")
|
|
@@ -346,7 +346,7 @@ class HUBTrainingSession:
|
|
|
346
346
|
"""
|
|
347
347
|
weights = Path(weights)
|
|
348
348
|
if not weights.is_file():
|
|
349
|
-
last = weights.with_name("last
|
|
349
|
+
last = weights.with_name(f"last{weights.suffix}")
|
|
350
350
|
if final and last.is_file():
|
|
351
351
|
LOGGER.warning(
|
|
352
352
|
f"{PREFIX} WARNING ⚠️ Model 'best.pt' not found, copying 'last.pt' to 'best.pt' and uploading. "
|
|
@@ -93,7 +93,7 @@ class FastSAMPredictor(SegmentationPredictor):
|
|
|
93
93
|
else torch.zeros(len(result), dtype=torch.bool, device=self.device)
|
|
94
94
|
)
|
|
95
95
|
for point, label in zip(points, labels):
|
|
96
|
-
point_idx[torch.nonzero(masks[:, point[1], point[0]], as_tuple=True)[0]] =
|
|
96
|
+
point_idx[torch.nonzero(masks[:, point[1], point[0]], as_tuple=True)[0]] = bool(label)
|
|
97
97
|
idx |= point_idx
|
|
98
98
|
if texts is not None:
|
|
99
99
|
if isinstance(texts, str):
|
|
@@ -736,7 +736,7 @@ class PositionEmbeddingSine(nn.Module):
|
|
|
736
736
|
self.num_pos_feats = num_pos_feats // 2
|
|
737
737
|
self.temperature = temperature
|
|
738
738
|
self.normalize = normalize
|
|
739
|
-
if scale is not None and normalize
|
|
739
|
+
if scale is not None and not normalize:
|
|
740
740
|
raise ValueError("normalize should be True if scale is passed")
|
|
741
741
|
if scale is None:
|
|
742
742
|
scale = 2 * math.pi
|
|
@@ -763,8 +763,7 @@ class PositionEmbeddingSine(nn.Module):
|
|
|
763
763
|
def encode_boxes(self, x, y, w, h):
|
|
764
764
|
"""Encodes box coordinates and dimensions into positional embeddings for detection."""
|
|
765
765
|
pos_x, pos_y = self._encode_xy(x, y)
|
|
766
|
-
|
|
767
|
-
return pos
|
|
766
|
+
return torch.cat((pos_y, pos_x, h[:, None], w[:, None]), dim=1)
|
|
768
767
|
|
|
769
768
|
encode = encode_boxes # Backwards compatibility
|
|
770
769
|
|
|
@@ -775,8 +774,7 @@ class PositionEmbeddingSine(nn.Module):
|
|
|
775
774
|
assert bx == by and nx == ny and bx == bl and nx == nl
|
|
776
775
|
pos_x, pos_y = self._encode_xy(x.flatten(), y.flatten())
|
|
777
776
|
pos_x, pos_y = pos_x.reshape(bx, nx, -1), pos_y.reshape(by, ny, -1)
|
|
778
|
-
|
|
779
|
-
return pos
|
|
777
|
+
return torch.cat((pos_y, pos_x, labels[:, :, None]), dim=2)
|
|
780
778
|
|
|
781
779
|
@torch.no_grad()
|
|
782
780
|
def forward(self, x: torch.Tensor):
|
|
@@ -435,9 +435,9 @@ class SAM2MaskDecoder(nn.Module):
|
|
|
435
435
|
upscaled_embedding = act1(ln1(dc1(src) + feat_s1))
|
|
436
436
|
upscaled_embedding = act2(dc2(upscaled_embedding) + feat_s0)
|
|
437
437
|
|
|
438
|
-
hyper_in_list: List[torch.Tensor] = [
|
|
439
|
-
|
|
440
|
-
|
|
438
|
+
hyper_in_list: List[torch.Tensor] = [
|
|
439
|
+
self.output_hypernetworks_mlps[i](mask_tokens_out[:, i, :]) for i in range(self.num_mask_tokens)
|
|
440
|
+
]
|
|
441
441
|
hyper_in = torch.stack(hyper_in_list, dim=1)
|
|
442
442
|
b, c, h, w = upscaled_embedding.shape
|
|
443
443
|
masks = (hyper_in @ upscaled_embedding.view(b, c, h * w)).view(b, -1, h, w)
|
|
@@ -459,8 +459,7 @@ class SAM2MaskDecoder(nn.Module):
|
|
|
459
459
|
stability_delta = self.dynamic_multimask_stability_delta
|
|
460
460
|
area_i = torch.sum(mask_logits > stability_delta, dim=-1).float()
|
|
461
461
|
area_u = torch.sum(mask_logits > -stability_delta, dim=-1).float()
|
|
462
|
-
|
|
463
|
-
return stability_scores
|
|
462
|
+
return torch.where(area_u > 0, area_i / area_u, 1.0)
|
|
464
463
|
|
|
465
464
|
def _dynamic_multimask_via_stability(self, all_mask_logits, all_iou_scores):
|
|
466
465
|
"""
|
|
@@ -491,12 +491,11 @@ class ImageEncoder(nn.Module):
|
|
|
491
491
|
features, pos = features[: -self.scalp], pos[: -self.scalp]
|
|
492
492
|
|
|
493
493
|
src = features[-1]
|
|
494
|
-
|
|
494
|
+
return {
|
|
495
495
|
"vision_features": src,
|
|
496
496
|
"vision_pos_enc": pos,
|
|
497
497
|
"backbone_fpn": features,
|
|
498
498
|
}
|
|
499
|
-
return output
|
|
500
499
|
|
|
501
500
|
|
|
502
501
|
class FpnNeck(nn.Module):
|
|
@@ -577,7 +576,7 @@ class FpnNeck(nn.Module):
|
|
|
577
576
|
|
|
578
577
|
self.convs.append(current)
|
|
579
578
|
self.fpn_interp_model = fpn_interp_model
|
|
580
|
-
assert fuse_type in
|
|
579
|
+
assert fuse_type in {"sum", "avg"}
|
|
581
580
|
self.fuse_type = fuse_type
|
|
582
581
|
|
|
583
582
|
# levels to have top-down features in its outputs
|
|
@@ -671,26 +671,19 @@ class SAM2Model(torch.nn.Module):
|
|
|
671
671
|
t_rel = self.num_maskmem - t_pos # how many frames before current frame
|
|
672
672
|
if t_rel == 1:
|
|
673
673
|
# for t_rel == 1, we take the last frame (regardless of r)
|
|
674
|
-
if
|
|
675
|
-
|
|
676
|
-
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
|
|
674
|
+
prev_frame_idx = frame_idx + t_rel if track_in_reverse else frame_idx - t_rel
|
|
675
|
+
elif not track_in_reverse:
|
|
676
|
+
# first find the nearest frame among every r-th frames before this frame
|
|
677
|
+
# for r=1, this would be (frame_idx - 2)
|
|
678
|
+
prev_frame_idx = ((frame_idx - 2) // r) * r
|
|
679
|
+
# then seek further among every r-th frames
|
|
680
|
+
prev_frame_idx = prev_frame_idx - (t_rel - 2) * r
|
|
680
681
|
else:
|
|
681
|
-
#
|
|
682
|
-
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
|
|
686
|
-
# then seek further among every r-th frames
|
|
687
|
-
prev_frame_idx = prev_frame_idx - (t_rel - 2) * r
|
|
688
|
-
else:
|
|
689
|
-
# first find the nearest frame among every r-th frames after this frame
|
|
690
|
-
# for r=1, this would be (frame_idx + 2)
|
|
691
|
-
prev_frame_idx = -(-(frame_idx + 2) // r) * r
|
|
692
|
-
# then seek further among every r-th frames
|
|
693
|
-
prev_frame_idx = prev_frame_idx + (t_rel - 2) * r
|
|
682
|
+
# first find the nearest frame among every r-th frames after this frame
|
|
683
|
+
# for r=1, this would be (frame_idx + 2)
|
|
684
|
+
prev_frame_idx = -(-(frame_idx + 2) // r) * r
|
|
685
|
+
# then seek further among every r-th frames
|
|
686
|
+
prev_frame_idx = prev_frame_idx + (t_rel - 2) * r
|
|
694
687
|
out = output_dict["non_cond_frame_outputs"].get(prev_frame_idx, None)
|
|
695
688
|
if out is None:
|
|
696
689
|
# If an unselected conditioning frame is among the last (self.num_maskmem - 1)
|
|
@@ -739,7 +732,7 @@ class SAM2Model(torch.nn.Module):
|
|
|
739
732
|
if out is not None:
|
|
740
733
|
pos_and_ptrs.append((t_diff, out["obj_ptr"]))
|
|
741
734
|
# If we have at least one object pointer, add them to the across attention
|
|
742
|
-
if
|
|
735
|
+
if pos_and_ptrs:
|
|
743
736
|
pos_list, ptrs_list = zip(*pos_and_ptrs)
|
|
744
737
|
# stack object pointers along dim=0 into [ptr_seq_len, B, C] shape
|
|
745
738
|
obj_ptrs = torch.stack(ptrs_list, dim=0)
|
|
@@ -930,12 +923,11 @@ class SAM2Model(torch.nn.Module):
|
|
|
930
923
|
def _use_multimask(self, is_init_cond_frame, point_inputs):
|
|
931
924
|
"""Determines whether to use multiple mask outputs in the SAM head based on configuration and inputs."""
|
|
932
925
|
num_pts = 0 if point_inputs is None else point_inputs["point_labels"].size(1)
|
|
933
|
-
|
|
926
|
+
return (
|
|
934
927
|
self.multimask_output_in_sam
|
|
935
928
|
and (is_init_cond_frame or self.multimask_output_for_tracking)
|
|
936
929
|
and (self.multimask_min_pt_num <= num_pts <= self.multimask_max_pt_num)
|
|
937
930
|
)
|
|
938
|
-
return multimask_output
|
|
939
931
|
|
|
940
932
|
def _apply_non_overlapping_constraints(self, pred_masks):
|
|
941
933
|
"""Applies non-overlapping constraints to masks, keeping highest scoring object per location."""
|
|
@@ -53,7 +53,7 @@ class ClassificationPredictor(BasePredictor):
|
|
|
53
53
|
if not isinstance(orig_imgs, list): # input images are a torch.Tensor, not a list
|
|
54
54
|
orig_imgs = ops.convert_torch2numpy_batch(orig_imgs)
|
|
55
55
|
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
56
|
+
return [
|
|
57
|
+
Results(orig_img, path=img_path, names=self.model.names, probs=pred)
|
|
58
|
+
for pred, orig_img, img_path in zip(preds, orig_imgs, self.batch[0])
|
|
59
|
+
]
|
|
@@ -1,5 +1,7 @@
|
|
|
1
1
|
# Ultralytics YOLO 🚀, AGPL-3.0 license
|
|
2
2
|
|
|
3
|
+
from copy import copy
|
|
4
|
+
|
|
3
5
|
import torch
|
|
4
6
|
|
|
5
7
|
from ultralytics.data import ClassificationDataset, build_dataloader
|
|
@@ -107,7 +109,9 @@ class ClassificationTrainer(BaseTrainer):
|
|
|
107
109
|
def get_validator(self):
|
|
108
110
|
"""Returns an instance of ClassificationValidator for validation."""
|
|
109
111
|
self.loss_names = ["loss"]
|
|
110
|
-
return yolo.classify.ClassificationValidator(
|
|
112
|
+
return yolo.classify.ClassificationValidator(
|
|
113
|
+
self.test_loader, self.save_dir, args=copy(self.args), _callbacks=self.callbacks
|
|
114
|
+
)
|
|
111
115
|
|
|
112
116
|
def label_loss_items(self, loss_items=None, prefix="train"):
|
|
113
117
|
"""
|
|
@@ -18,5 +18,4 @@ class AGLU(nn.Module):
|
|
|
18
18
|
def forward(self, x: torch.Tensor) -> torch.Tensor:
|
|
19
19
|
"""Compute the forward pass of the Unified activation function."""
|
|
20
20
|
lam = torch.clamp(self.lambd, min=0.0001)
|
|
21
|
-
|
|
22
|
-
return y # for AGLU simply return y * input
|
|
21
|
+
return torch.exp((1 / lam) * self.act((self.kappa * x) - torch.log(lam)))
|
|
@@ -341,11 +341,8 @@ class DetectionModel(BaseModel):
|
|
|
341
341
|
|
|
342
342
|
def _predict_augment(self, x):
|
|
343
343
|
"""Perform augmentations on input image x and return augmented inference and train outputs."""
|
|
344
|
-
if getattr(self, "end2end", False):
|
|
345
|
-
LOGGER.warning(
|
|
346
|
-
"WARNING ⚠️ End2End model does not support 'augment=True' prediction. "
|
|
347
|
-
"Reverting to single-scale prediction."
|
|
348
|
-
)
|
|
344
|
+
if getattr(self, "end2end", False) or self.__class__.__name__ != "DetectionModel":
|
|
345
|
+
LOGGER.warning("WARNING ⚠️ Model does not support 'augment=True', reverting to single-scale prediction.")
|
|
349
346
|
return self._predict_once(x)
|
|
350
347
|
img_size = x.shape[-2:] # height, width
|
|
351
348
|
s = [1, 0.83, 0.67] # scales
|