ultralytics 8.3.0__py3-none-any.whl → 8.3.1__py3-none-any.whl
This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release. This version of ultralytics might be problematic.
- ultralytics/__init__.py +1 -1
- ultralytics/cfg/__init__.py +20 -20
- ultralytics/utils/checks.py +8 -8
- {ultralytics-8.3.0.dist-info → ultralytics-8.3.1.dist-info}/METADATA +37 -37
- {ultralytics-8.3.0.dist-info → ultralytics-8.3.1.dist-info}/RECORD +9 -9
- {ultralytics-8.3.0.dist-info → ultralytics-8.3.1.dist-info}/LICENSE +0 -0
- {ultralytics-8.3.0.dist-info → ultralytics-8.3.1.dist-info}/WHEEL +0 -0
- {ultralytics-8.3.0.dist-info → ultralytics-8.3.1.dist-info}/entry_points.txt +0 -0
- {ultralytics-8.3.0.dist-info → ultralytics-8.3.1.dist-info}/top_level.txt +0 -0
ultralytics/__init__.py
CHANGED
ultralytics/cfg/__init__.py
CHANGED
```diff
@@ -42,11 +42,11 @@ TASK2DATA = {
     "obb": "dota8.yaml",
 }
 TASK2MODEL = {
-    "detect": "
-    "segment": "
-    "classify": "
-    "pose": "
-    "obb": "
+    "detect": "yolo11n.pt",
+    "segment": "yolo11n-seg.pt",
+    "classify": "yolo11n-cls.pt",
+    "pose": "yolo11n-pose.pt",
+    "obb": "yolo11n-obb.pt",
 }
 TASK2METRIC = {
     "detect": "metrics/mAP50-95(B)",
```
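The TASK2MODEL mapping pairs each task with a default pretrained checkpoint, and the hunk above is where every default moves to the YOLO11n family. A minimal sketch of how such a mapping resolves a checkpoint; `default_model_for` is a hypothetical helper written for this example, only the dictionary values come from the diff.

```python
# Default checkpoints per task as of 8.3.1 (values copied from the hunk above).
TASK2MODEL = {
    "detect": "yolo11n.pt",
    "segment": "yolo11n-seg.pt",
    "classify": "yolo11n-cls.pt",
    "pose": "yolo11n-pose.pt",
    "obb": "yolo11n-obb.pt",
}


def default_model_for(task: str) -> str:
    """Hypothetical helper: resolve a task name to its default YOLO11 checkpoint."""
    try:
        return TASK2MODEL[task]
    except KeyError:
        raise ValueError(f"unknown task {task!r}, expected one of {sorted(TASK2MODEL)}") from None


print(default_model_for("pose"))  # yolo11n-pose.pt
```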
```diff
@@ -69,19 +69,19 @@ CLI_HELP_MSG = f"""
     See all ARGS at https://docs.ultralytics.com/usage/cfg or with 'yolo cfg'

     1. Train a detection model for 10 epochs with an initial learning_rate of 0.01
-        yolo train data=coco8.yaml model=
+        yolo train data=coco8.yaml model=yolo11n.pt epochs=10 lr0=0.01

     2. Predict a YouTube video using a pretrained segmentation model at image size 320:
-        yolo predict model=
+        yolo predict model=yolo11n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320

     3. Val a pretrained detection model at batch-size 1 and image size 640:
-        yolo val model=
+        yolo val model=yolo11n.pt data=coco8.yaml batch=1 imgsz=640

-    4. Export a
-        yolo export model=
+    4. Export a YOLO11n classification model to ONNX format at image size 224 by 128 (no TASK required)
+        yolo export model=yolo11n-cls.pt format=onnx imgsz=224,128

     5. Explore your datasets using semantic search and SQL with a simple GUI powered by Ultralytics Explorer API
-        yolo explorer data=data.yaml model=
+        yolo explorer data=data.yaml model=yolo11n.pt

     6. Streamlit real-time webcam inference GUI
         yolo streamlit-predict
```
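For reference, the first two updated CLI examples map onto the Python API roughly as follows. This is a sketch rather than text from the diff; it uses the public `YOLO` class, and the keyword arguments mirror the CLI flags shown above.

```python
from ultralytics import YOLO

# 1. Train a detector for 10 epochs with an initial learning rate of 0.01
#    (CLI: yolo train data=coco8.yaml model=yolo11n.pt epochs=10 lr0=0.01)
model = YOLO("yolo11n.pt")
model.train(data="coco8.yaml", epochs=10, lr0=0.01)

# 2. Predict a YouTube video with a pretrained segmentation model at imgsz=320
#    (CLI: yolo predict model=yolo11n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320)
seg = YOLO("yolo11n-seg.pt")
seg.predict(source="https://youtu.be/LNwODJXcvt4", imgsz=320)
```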
```diff
@@ -517,7 +517,7 @@ def handle_yolo_settings(args: List[str]) -> None:

     Examples:
         >>> handle_yolo_settings(["reset"])  # Reset YOLO settings
-        >>> handle_yolo_settings(["default_cfg_path=
+        >>> handle_yolo_settings(["default_cfg_path=yolo11n.yaml"])  # Update a specific setting

     Notes:
         - If no arguments are provided, the function will display the current settings.
```
````diff
@@ -557,7 +557,7 @@ def handle_explorer(args: List[str]):

     Examples:
         ```bash
-        yolo explorer data=data.yaml model=
+        yolo explorer data=data.yaml model=yolo11n.pt
         ```

     Notes:
````
```diff
@@ -611,9 +611,9 @@ def parse_key_value_pair(pair: str = "key=value"):
         AssertionError: If the value is missing or empty.

     Examples:
-        >>> key, value = parse_key_value_pair("model=
+        >>> key, value = parse_key_value_pair("model=yolo11n.pt")
         >>> print(f"Key: {key}, Value: {value}")
-        Key: model, Value:
+        Key: model, Value: yolo11n.pt

         >>> key, value = parse_key_value_pair("epochs=100")
         >>> print(f"Key: {key}, Value: {value}")
```
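The docstring pins down the contract: split on the first "=", strip whitespace, and fail with AssertionError when the value is missing or empty. A self-contained sketch of that behaviour (not the library's implementation) looks like this:

```python
def parse_key_value_pair_sketch(pair: str = "key=value"):
    """Split a 'key=value' CLI token; raise AssertionError if the value is empty."""
    key, _, value = pair.partition("=")
    key, value = key.strip(), value.strip()
    assert value, f"missing '{key}' value"
    return key, value


print(parse_key_value_pair_sketch("model=yolo11n.pt"))  # ('model', 'yolo11n.pt')
print(parse_key_value_pair_sketch("epochs=100"))        # ('epochs', '100')
```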
```diff
@@ -686,13 +686,13 @@ def entrypoint(debug=""):

     Examples:
         Train a detection model for 10 epochs with an initial learning_rate of 0.01:
-        >>> entrypoint("train data=coco8.yaml model=
+        >>> entrypoint("train data=coco8.yaml model=yolo11n.pt epochs=10 lr0=0.01")

         Predict a YouTube video using a pretrained segmentation model at image size 320:
-        >>> entrypoint("predict model=
+        >>> entrypoint("predict model=yolo11n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320")

         Validate a pretrained detection model at batch-size 1 and image size 640:
-        >>> entrypoint("val model=
+        >>> entrypoint("val model=yolo11n.pt data=coco8.yaml batch=1 imgsz=640")

     Notes:
         - If no arguments are passed, the function will display the usage help message.
```
```diff
@@ -782,7 +782,7 @@ def entrypoint(debug=""):
     # Model
     model = overrides.pop("model", DEFAULT_CFG.model)
     if model is None:
-        model = "
+        model = "yolo11n.pt"
         LOGGER.warning(f"WARNING ⚠️ 'model' argument is missing. Using default 'model={model}'.")
     overrides["model"] = model
     stem = Path(model).stem.lower()
```
```diff
@@ -869,5 +869,5 @@ def copy_default_cfg():


 if __name__ == "__main__":
-    # Example: entrypoint(debug='yolo predict model=
+    # Example: entrypoint(debug='yolo predict model=yolo11n.pt')
     entrypoint(debug="")
```
ultralytics/utils/checks.py
CHANGED
````diff
@@ -629,24 +629,24 @@ def collect_system_info():

 def check_amp(model):
     """
-    Checks the PyTorch Automatic Mixed Precision (AMP) functionality of a
+    Checks the PyTorch Automatic Mixed Precision (AMP) functionality of a YOLO11 model. If the checks fail, it means
     there are anomalies with AMP on the system that may cause NaN losses or zero-mAP results, so AMP will be disabled
     during training.

     Args:
-        model (nn.Module): A
+        model (nn.Module): A YOLO11 model instance.

     Example:
         ```python
         from ultralytics import YOLO
         from ultralytics.utils.checks import check_amp

-        model = YOLO("
+        model = YOLO("yolo11n.pt").model.cuda()
         check_amp(model)
         ```

     Returns:
-        (bool): Returns True if the AMP functionality works correctly with
+        (bool): Returns True if the AMP functionality works correctly with YOLO11 model, else False.
     """
     from ultralytics.utils.torch_utils import autocast

````
```diff
@@ -665,19 +665,19 @@ def check_amp(model):

     im = ASSETS / "bus.jpg"  # image to check
     prefix = colorstr("AMP: ")
-    LOGGER.info(f"{prefix}running Automatic Mixed Precision (AMP) checks with
+    LOGGER.info(f"{prefix}running Automatic Mixed Precision (AMP) checks with YOLO11n...")
     warning_msg = "Setting 'amp=True'. If you experience zero-mAP or NaN losses you can disable AMP with amp=False."
     try:
         from ultralytics import YOLO

-        assert amp_allclose(YOLO("
+        assert amp_allclose(YOLO("yolo11n.pt"), im)
         LOGGER.info(f"{prefix}checks passed ✅")
     except ConnectionError:
-        LOGGER.warning(f"{prefix}checks skipped ⚠️, offline and unable to download
+        LOGGER.warning(f"{prefix}checks skipped ⚠️, offline and unable to download YOLO11n. {warning_msg}")
     except (AttributeError, ModuleNotFoundError):
         LOGGER.warning(
             f"{prefix}checks skipped ⚠️. "
-            f"Unable to load
+            f"Unable to load YOLO11n due to possible Ultralytics package modifications. {warning_msg}"
         )
     except AssertionError:
         LOGGER.warning(
```
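Judging by its name and the surrounding messages, `amp_allclose` runs the model on `bus.jpg` and requires the mixed-precision output to stay close to the full-precision one; a failure is what triggers the warnings above and disables AMP. As a rough illustration of that idea (not the ultralytics implementation, which compares detection results from the `YOLO` predictor), here is a sketch for a generic module whose forward pass returns a single tensor:

```python
import torch


def amp_allclose_sketch(model: torch.nn.Module, im: torch.Tensor, atol: float = 0.5) -> bool:
    """Compare a full-precision forward pass with an autocast (mixed-precision) one on CUDA."""
    model = model.eval().cuda()
    im = im.cuda()
    with torch.inference_mode():
        fp32_out = model(im).float()        # full-precision baseline
        with torch.autocast(device_type="cuda"):
            amp_out = model(im).float()     # mixed-precision run
    return torch.allclose(fp32_out, amp_out, atol=atol)
```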
{ultralytics-8.3.0.dist-info → ultralytics-8.3.1.dist-info}/METADATA
CHANGED
```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ultralytics
-Version: 8.3.0
+Version: 8.3.1
 Summary: Ultralytics YOLO for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
 Author: Ayush Chaurasia
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
```
```diff
@@ -11,7 +11,7 @@ Project-URL: Source, https://github.com/ultralytics/ultralytics
 Project-URL: Documentation, https://docs.ultralytics.com
 Project-URL: Bug Reports, https://github.com/ultralytics/ultralytics/issues
 Project-URL: Changelog, https://github.com/ultralytics/ultralytics/releases
-Keywords: machine-learning,deep-learning,computer-vision,ML,DL,AI,YOLO,YOLOv3,YOLOv5,YOLOv8,YOLOv9,YOLOv10,HUB,Ultralytics
+Keywords: machine-learning,deep-learning,computer-vision,ML,DL,AI,YOLO,YOLOv3,YOLOv5,YOLOv8,YOLOv9,YOLOv10,YOLO11,HUB,Ultralytics
 Classifier: Development Status :: 4 - Beta
 Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: Education
```
```diff
@@ -212,13 +212,13 @@ All [Models](https://github.com/ultralytics/ultralytics/tree/main/ultralytics/cf

 See [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples with these models trained on [COCO](https://docs.ultralytics.com/datasets/detect/coco/), which include 80 pre-trained classes.

-| Model | size<br><sup>(pixels) | mAP<sup>val<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>
-| ------------------------------------------------------------------------------------ | --------------------- | -------------------- | ------------------------------ |
-| [YOLO11n](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n.pt) | 640 | 39.5 | 56.12 ± 0.82 ms | 1.55 ± 0.01 ms
-| [YOLO11s](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s.pt) | 640 | 47.0 | 90.01 ± 1.17 ms | 2.46 ± 0.00 ms
-| [YOLO11m](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m.pt) | 640 | 51.5 | 183.20 ± 2.04 ms | 4.70 ± 0.06 ms
-| [YOLO11l](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l.pt) | 640 | 53.4 | 238.64 ± 1.39 ms | 6.16 ± 0.08 ms
-| [YOLO11x](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x.pt) | 640 | 54.7 | 462.78 ± 6.66 ms | 11.31 ± 0.24 ms
+| Model | size<br><sup>(pixels) | mAP<sup>val<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
+| ------------------------------------------------------------------------------------ | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
+| [YOLO11n](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n.pt) | 640 | 39.5 | 56.12 ± 0.82 ms | 1.55 ± 0.01 ms | 2.6 | 6.5 |
+| [YOLO11s](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s.pt) | 640 | 47.0 | 90.01 ± 1.17 ms | 2.46 ± 0.00 ms | 9.4 | 21.5 |
+| [YOLO11m](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m.pt) | 640 | 51.5 | 183.20 ± 2.04 ms | 4.70 ± 0.06 ms | 20.1 | 68.0 |
+| [YOLO11l](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l.pt) | 640 | 53.4 | 238.64 ± 1.39 ms | 6.16 ± 0.08 ms | 25.3 | 86.9 |
+| [YOLO11x](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x.pt) | 640 | 54.7 | 462.78 ± 6.66 ms | 11.31 ± 0.24 ms | 56.9 | 194.9 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on [COCO val2017](https://cocodataset.org/) dataset. <br>Reproduce by `yolo val detect data=coco.yaml device=0`
 - **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val detect data=coco.yaml batch=1 device=0|cpu`
```
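The footnote's reproduction command has a direct Python-API counterpart. A sketch, assuming the COCO dataset is available (or downloadable) on the machine:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")                       # any detection checkpoint from the table
metrics = model.val(data="coco.yaml", device=0)  # same as: yolo val detect data=coco.yaml device=0
print(metrics.box.map)                           # mAP50-95, the column reported above
```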
```diff
@@ -229,13 +229,13 @@ See [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examp

 See [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage examples with these models trained on [COCO-Seg](https://docs.ultralytics.com/datasets/segment/coco/), which include 80 pre-trained classes.

-| Model | size<br><sup>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mask<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>
-| -------------------------------------------------------------------------------------------- | --------------------- | -------------------- | --------------------- | ------------------------------ |
-| [YOLO11n-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n-seg.pt) | 640 | 38.9 | 32.0 | 65.90 ± 1.14 ms | 1.84 ± 0.00 ms
-| [YOLO11s-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s-seg.pt) | 640 | 46.6 | 37.8 | 117.56 ± 4.89 ms | 2.94 ± 0.01 ms
-| [YOLO11m-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m-seg.pt) | 640 | 51.5 | 41.5 | 281.63 ± 1.16 ms | 6.31 ± 0.09 ms
-| [YOLO11l-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-seg.pt) | 640 | 53.4 | 42.9 | 344.16 ± 3.17 ms | 7.78 ± 0.16 ms
-| [YOLO11x-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-seg.pt) | 640 | 54.7 | 43.8 | 664.50 ± 3.24 ms | 15.75 ± 0.67 ms
+| Model | size<br><sup>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mask<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
+| -------------------------------------------------------------------------------------------- | --------------------- | -------------------- | --------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
+| [YOLO11n-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n-seg.pt) | 640 | 38.9 | 32.0 | 65.90 ± 1.14 ms | 1.84 ± 0.00 ms | 2.9 | 10.4 |
+| [YOLO11s-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s-seg.pt) | 640 | 46.6 | 37.8 | 117.56 ± 4.89 ms | 2.94 ± 0.01 ms | 10.1 | 35.5 |
+| [YOLO11m-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m-seg.pt) | 640 | 51.5 | 41.5 | 281.63 ± 1.16 ms | 6.31 ± 0.09 ms | 22.4 | 123.3 |
+| [YOLO11l-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-seg.pt) | 640 | 53.4 | 42.9 | 344.16 ± 3.17 ms | 7.78 ± 0.16 ms | 27.6 | 142.2 |
+| [YOLO11x-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-seg.pt) | 640 | 54.7 | 43.8 | 664.50 ± 3.24 ms | 15.75 ± 0.67 ms | 62.1 | 319.0 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on [COCO val2017](https://cocodataset.org/) dataset. <br>Reproduce by `yolo val segment data=coco-seg.yaml device=0`
 - **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val segment data=coco-seg.yaml batch=1 device=0|cpu`
```
```diff
@@ -246,13 +246,13 @@ See [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage e

 See [Pose Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples with these models trained on [COCO-Pose](https://docs.ultralytics.com/datasets/pose/coco/), which include 1 pre-trained class, person.

-| Model | size<br><sup>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>
-| ---------------------------------------------------------------------------------------------- | --------------------- | --------------------- | ------------------ | ------------------------------ |
-| [YOLO11n-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n-pose.pt) | 640 | 50.0 | 81.0 | 52.40 ± 0.51 ms | 1.72 ± 0.01 ms
-| [YOLO11s-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s-pose.pt) | 640 | 58.9 | 86.3 | 90.54 ± 0.59 ms | 2.57 ± 0.00 ms
-| [YOLO11m-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m-pose.pt) | 640 | 64.9 | 89.4 | 187.28 ± 0.77 ms | 4.94 ± 0.05 ms
-| [YOLO11l-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-pose.pt) | 640 | 66.1 | 89.9 | 247.69 ± 1.10 ms | 6.42 ± 0.13 ms
-| [YOLO11x-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-pose.pt) | 640 | 69.5 | 91.1 | 487.97 ± 13.91 ms | 12.06 ± 0.20 ms
+| Model | size<br><sup>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
+| ---------------------------------------------------------------------------------------------- | --------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
+| [YOLO11n-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n-pose.pt) | 640 | 50.0 | 81.0 | 52.40 ± 0.51 ms | 1.72 ± 0.01 ms | 2.9 | 7.6 |
+| [YOLO11s-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s-pose.pt) | 640 | 58.9 | 86.3 | 90.54 ± 0.59 ms | 2.57 ± 0.00 ms | 9.9 | 23.2 |
+| [YOLO11m-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m-pose.pt) | 640 | 64.9 | 89.4 | 187.28 ± 0.77 ms | 4.94 ± 0.05 ms | 20.9 | 71.7 |
+| [YOLO11l-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-pose.pt) | 640 | 66.1 | 89.9 | 247.69 ± 1.10 ms | 6.42 ± 0.13 ms | 26.2 | 90.7 |
+| [YOLO11x-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-pose.pt) | 640 | 69.5 | 91.1 | 487.97 ± 13.91 ms | 12.06 ± 0.20 ms | 58.8 | 203.3 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on [COCO Keypoints val2017](https://cocodataset.org/) dataset. <br>Reproduce by `yolo val pose data=coco-pose.yaml device=0`
 - **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu`
```
```diff
@@ -263,13 +263,13 @@ See [Pose Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples wit

 See [OBB Docs](https://docs.ultralytics.com/tasks/obb/) for usage examples with these models trained on [DOTAv1](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10/), which include 15 pre-trained classes.

-| Model | size<br><sup>(pixels) | mAP<sup>test<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>
-| -------------------------------------------------------------------------------------------- | --------------------- | ------------------ | ------------------------------ |
-| [YOLO11n-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n-obb.pt) | 1024 | 78.4 | 117.56 ± 0.80 ms | 4.43 ± 0.01 ms
-| [YOLO11s-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s-obb.pt) | 1024 | 79.5 | 219.41 ± 4.00 ms | 5.13 ± 0.02 ms
-| [YOLO11m-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m-obb.pt) | 1024 | 80.9 | 562.81 ± 2.87 ms | 10.07 ± 0.38 ms
-| [YOLO11l-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-obb.pt) | 1024 | 81.0 | 712.49 ± 4.98 ms | 13.46 ± 0.55 ms
-| [YOLO11x-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-obb.pt) | 1024 | 81.3 | 1408.63 ± 7.67 ms | 28.59 ± 0.96 ms
+| Model | size<br><sup>(pixels) | mAP<sup>test<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
+| -------------------------------------------------------------------------------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
+| [YOLO11n-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n-obb.pt) | 1024 | 78.4 | 117.56 ± 0.80 ms | 4.43 ± 0.01 ms | 2.7 | 17.2 |
+| [YOLO11s-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s-obb.pt) | 1024 | 79.5 | 219.41 ± 4.00 ms | 5.13 ± 0.02 ms | 9.7 | 57.5 |
+| [YOLO11m-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m-obb.pt) | 1024 | 80.9 | 562.81 ± 2.87 ms | 10.07 ± 0.38 ms | 20.9 | 183.5 |
+| [YOLO11l-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-obb.pt) | 1024 | 81.0 | 712.49 ± 4.98 ms | 13.46 ± 0.55 ms | 26.2 | 232.0 |
+| [YOLO11x-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-obb.pt) | 1024 | 81.3 | 1408.63 ± 7.67 ms | 28.59 ± 0.96 ms | 58.8 | 520.2 |

 - **mAP<sup>test</sup>** values are for single-model multiscale on [DOTAv1](https://captain-whu.github.io/DOTA/index.html) dataset. <br>Reproduce by `yolo val obb data=DOTAv1.yaml device=0 split=test` and submit merged results to [DOTA evaluation](https://captain-whu.github.io/DOTA/evaluation.html).
 - **Speed** averaged over DOTAv1 val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val obb data=DOTAv1.yaml batch=1 device=0|cpu`
```
```diff
@@ -280,13 +280,13 @@ See [OBB Docs](https://docs.ultralytics.com/tasks/obb/) for usage examples with

 See [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usage examples with these models trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), which include 1000 pretrained classes.

-| Model | size<br><sup>(pixels) | acc<br><sup>top1 | acc<br><sup>top5 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>
-| -------------------------------------------------------------------------------------------- | --------------------- | ---------------- | ---------------- | ------------------------------ |
-| [YOLO11n-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n-cls.pt) | 224 | 70.0 | 89.4 | 5.03 ± 0.32 ms | 1.10 ± 0.01 ms
-| [YOLO11s-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s-cls.pt) | 224 | 75.4 | 92.7 | 7.89 ± 0.18 ms | 1.34 ± 0.01 ms
-| [YOLO11m-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m-cls.pt) | 224 | 77.3 | 93.9 | 17.17 ± 0.40 ms | 1.95 ± 0.00 ms
-| [YOLO11l-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-cls.pt) | 224 | 78.3 | 94.3 | 23.17 ± 0.29 ms | 2.76 ± 0.00 ms
-| [YOLO11x-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-cls.pt) | 224 | 79.5 | 94.9 | 41.41 ± 0.94 ms | 3.82 ± 0.00 ms
+| Model | size<br><sup>(pixels) | acc<br><sup>top1 | acc<br><sup>top5 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) at 640 |
+| -------------------------------------------------------------------------------------------- | --------------------- | ---------------- | ---------------- | ------------------------------ | ----------------------------------- | ------------------ | ------------------------ |
+| [YOLO11n-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n-cls.pt) | 224 | 70.0 | 89.4 | 5.03 ± 0.32 ms | 1.10 ± 0.01 ms | 1.6 | 3.3 |
+| [YOLO11s-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s-cls.pt) | 224 | 75.4 | 92.7 | 7.89 ± 0.18 ms | 1.34 ± 0.01 ms | 5.5 | 12.1 |
+| [YOLO11m-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m-cls.pt) | 224 | 77.3 | 93.9 | 17.17 ± 0.40 ms | 1.95 ± 0.00 ms | 10.4 | 39.3 |
+| [YOLO11l-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-cls.pt) | 224 | 78.3 | 94.3 | 23.17 ± 0.29 ms | 2.76 ± 0.00 ms | 12.9 | 49.4 |
+| [YOLO11x-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-cls.pt) | 224 | 79.5 | 94.9 | 41.41 ± 0.94 ms | 3.82 ± 0.00 ms | 28.4 | 110.4 |

 - **acc** values are model accuracies on the [ImageNet](https://www.image-net.org/) dataset validation set. <br>Reproduce by `yolo val classify data=path/to/ImageNet device=0`
 - **Speed** averaged over ImageNet val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu`
```
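These classification checkpoints are also what the updated export example in the CLI help earlier in this diff uses. In the Python API that example looks roughly like the sketch below; exporting to ONNX requires the `onnx` package to be installed.

```python
from ultralytics import YOLO

model = YOLO("yolo11n-cls.pt")
# Same as the CLI example: yolo export model=yolo11n-cls.pt format=onnx imgsz=224,128
model.export(format="onnx", imgsz=[224, 128])
```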
{ultralytics-8.3.0.dist-info → ultralytics-8.3.1.dist-info}/RECORD
CHANGED
```diff
@@ -8,10 +8,10 @@ tests/test_exports.py,sha256=fpTKEVBUGLF3WiZPNKRs-IEcIY4cfxgvgKjUNfodjww,8042
 tests/test_integrations.py,sha256=f5-QCUk1SU_-qn4mBCZwS3GN3tXEBIIXo4z2EhExbHw,6126
 tests/test_python.py,sha256=I1RRdCwLdrc3jX06huVxct8HX8ccQOmQgVpuEflRl0U,23560
 tests/test_solutions.py,sha256=eAaLf1wM7IJ6DjT7NEw6sRaeDuTX0ZgsTjrI33XFCXE,3300
-ultralytics/__init__.py,sha256=
+ultralytics/__init__.py,sha256=v5M2b4dAONrER9wuum2weuqz6i71ntKRaMrCBiaLCmg,693
 ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
 ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
-ultralytics/cfg/__init__.py,sha256=
+ultralytics/cfg/__init__.py,sha256=62PSSAa0W4-gAEcRNKoKbcxUWBeFNs0ss2O4XJQhOPY,33145
 ultralytics/cfg/default.yaml,sha256=tkBn3c6duKGFyENuULkWessAqaaxo9atuOxXq3XbItM,8314
 ultralytics/cfg/datasets/Argoverse.yaml,sha256=FyeuJT5CHq_9d4hlfAf0kpZlnbUMO0S--UJ1yIqcdKk,3134
 ultralytics/cfg/datasets/DOTAv1.5.yaml,sha256=QVfp_Qp-4rukuicaB4qx86NxSHM8Mrzym8l_fIDo8gw,1195
```
```diff
@@ -205,7 +205,7 @@ ultralytics/trackers/utils/matching.py,sha256=3Ie1WNNRZ4_q3365F03XD7Nr9juZB_08mw
 ultralytics/utils/__init__.py,sha256=Vl0nNyniKdFJYkQfwHnQ3CFS8GwqajZk5iY2m7l1irA,48238
 ultralytics/utils/autobatch.py,sha256=AXboYfNSnTGsYj5FmgGYPQd0crfkeleyms6QXQfZGQ4,4194
 ultralytics/utils/benchmarks.py,sha256=IN6ZqU-1DVHnwRsdgS_vcBhng8DUMRIEjEEgdrl1mdY,25101
-ultralytics/utils/checks.py,sha256=
+ultralytics/utils/checks.py,sha256=PgvIYpYw8gmwifDShoUuSil396FPf9KmZWh4FaWtSWA,28910
 ultralytics/utils/dist.py,sha256=NDFga-uKxkBX2zLxFHSene_cCiGQJoyOeCXcN9JIOIk,2358
 ultralytics/utils/downloads.py,sha256=97JitihZqvIMS6_TX5rJAG7BI8eYHlu5g8YXlI0RkR4,21998
 ultralytics/utils/errors.py,sha256=GqP_Jgj_n0paxn8OMhn3DTCgoNkB2WjUcUaqs-M6SQk,816
```
```diff
@@ -231,9 +231,9 @@ ultralytics/utils/callbacks/neptune.py,sha256=5Z3ua5YBTUS56FH8VQKQG1aaIo9fH8GEyz
 ultralytics/utils/callbacks/raytune.py,sha256=ODVYzy-CoM4Uge0zjkh3Hnh9nF2M0vhDrSenXnvcizw,705
 ultralytics/utils/callbacks/tensorboard.py,sha256=0kn4IR10no99UCIheojWRujgybmUHSx5fPI6Vsq6l_g,4135
 ultralytics/utils/callbacks/wb.py,sha256=9-fjQIdLjr3b73DTE3rHO171KvbH1VweJ-bmbv-rqTw,6747
-ultralytics-8.3.
-ultralytics-8.3.
-ultralytics-8.3.
-ultralytics-8.3.
-ultralytics-8.3.
-ultralytics-8.3.
+ultralytics-8.3.1.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ultralytics-8.3.1.dist-info/METADATA,sha256=nizZPXWp3kanv5QhQYO2nHQ3cBN0T3oN6wmJuHJwRsc,34574
+ultralytics-8.3.1.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+ultralytics-8.3.1.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ultralytics-8.3.1.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ultralytics-8.3.1.dist-info/RECORD,,
```
{ultralytics-8.3.0.dist-info → ultralytics-8.3.1.dist-info}/LICENSE
File without changes
{ultralytics-8.3.0.dist-info → ultralytics-8.3.1.dist-info}/WHEEL
File without changes
{ultralytics-8.3.0.dist-info → ultralytics-8.3.1.dist-info}/entry_points.txt
File without changes
{ultralytics-8.3.0.dist-info → ultralytics-8.3.1.dist-info}/top_level.txt
File without changes