ultralytics 8.2.0__py3-none-any.whl → 8.2.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of ultralytics might be problematic.
- ultralytics/__init__.py +1 -1
- ultralytics/cfg/__init__.py +2 -2
- ultralytics/cfg/default.yaml +1 -1
- ultralytics/engine/exporter.py +2 -0
- ultralytics/engine/trainer.py +4 -1
- ultralytics/engine/validator.py +1 -1
- ultralytics/models/yolo/classify/val.py +2 -2
- ultralytics/nn/autobackend.py +5 -2
- ultralytics/utils/__init__.py +3 -3
- ultralytics/utils/downloads.py +2 -2
- ultralytics/utils/plotting.py +1 -6
- ultralytics/utils/torch_utils.py +2 -0
- {ultralytics-8.2.0.dist-info → ultralytics-8.2.2.dist-info}/METADATA +35 -35
- {ultralytics-8.2.0.dist-info → ultralytics-8.2.2.dist-info}/RECORD +18 -18
- {ultralytics-8.2.0.dist-info → ultralytics-8.2.2.dist-info}/LICENSE +0 -0
- {ultralytics-8.2.0.dist-info → ultralytics-8.2.2.dist-info}/WHEEL +0 -0
- {ultralytics-8.2.0.dist-info → ultralytics-8.2.2.dist-info}/entry_points.txt +0 -0
- {ultralytics-8.2.0.dist-info → ultralytics-8.2.2.dist-info}/top_level.txt +0 -0
ultralytics/__init__.py
CHANGED
ultralytics/cfg/__init__.py
CHANGED
@@ -66,13 +66,13 @@ CLI_HELP_MSG = f"""
 See all ARGS at https://docs.ultralytics.com/usage/cfg or with 'yolo cfg'

 1. Train a detection model for 10 epochs with an initial learning_rate of 0.01
-yolo train data=
+yolo train data=coco8.yaml model=yolov8n.pt epochs=10 lr0=0.01

 2. Predict a YouTube video using a pretrained segmentation model at image size 320:
 yolo predict model=yolov8n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320

 3. Val a pretrained detection model at batch-size 1 and image size 640:
-yolo val model=yolov8n.pt data=
+yolo val model=yolov8n.pt data=coco8.yaml batch=1 imgsz=640

 4. Export a YOLOv8n classification model to ONNX format at image size 224 by 128 (no TASK required)
 yolo export model=yolov8n-cls.pt format=onnx imgsz=224,128
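The updated help text points every CLI example at the small coco8.yaml dataset. For reference, the rough Python API equivalents are sketched below; this assumes the yolov8n weights and the coco8 dataset auto-download on first use, so treat it as illustrative rather than authoritative.

from ultralytics import YOLO

# Train a detection model for 10 epochs with an initial learning rate of 0.01
model = YOLO("yolov8n.pt")
model.train(data="coco8.yaml", epochs=10, lr0=0.01)

# Validate at batch size 1 and image size 640
metrics = model.val(data="coco8.yaml", batch=1, imgsz=640)

# Export a classification model to ONNX at a 224x128 input size (no task argument needed)
YOLO("yolov8n-cls.pt").export(format="onnx", imgsz=(224, 128))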
ultralytics/cfg/default.yaml
CHANGED
@@ -6,7 +6,7 @@ mode: train # (str) YOLO mode, i.e. train, val, predict, export, track, benchmark

 # Train settings -------------------------------------------------------------------------------------------------------
 model: # (str, optional) path to model file, i.e. yolov8n.pt, yolov8n.yaml
-data: # (str, optional) path to data file, i.e.
+data: # (str, optional) path to data file, i.e. coco8.yaml
 epochs: 100 # (int) number of epochs to train for
 time: # (float, optional) number of hours to train for, overrides epochs if supplied
 patience: 100 # (int) epochs to wait for no observable improvement for early stopping of training
ultralytics/engine/exporter.py
CHANGED
@@ -50,6 +50,7 @@ TensorFlow.js:
 $ npm start
 """

+import gc
 import json
 import os
 import shutil

@@ -713,6 +714,7 @@ class Exporter:

 # Free CUDA memory
 del self.model
+gc.collect()
 torch.cuda.empty_cache()

 # Write file
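The exporter, trainer, and torch_utils changes all add the same cleanup step: run Python's garbage collector before asking PyTorch to release its cached CUDA blocks. A minimal sketch of that pattern follows; free_cuda_memory is a hypothetical helper written for illustration, not part of the ultralytics API.

import gc

import torch


def free_cuda_memory() -> None:
    """Run the Python garbage collector, then release cached CUDA memory.

    gc.collect() first breaks lingering reference cycles so tensor storage is
    actually freed; torch.cuda.empty_cache() can then hand the cached blocks
    back to the driver instead of keeping them reserved by the process.
    """
    gc.collect()
    if torch.cuda.is_available():  # empty_cache() only matters on CUDA builds
        torch.cuda.empty_cache()


# Typical use, mirroring the 8.2.2 exporter change:
# del model
# free_cuda_memory()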
ultralytics/engine/trainer.py
CHANGED
@@ -3,9 +3,10 @@
 Train a model on a dataset.

 Usage:
-$ yolo mode=train model=yolov8n.pt data=
+$ yolo mode=train model=yolov8n.pt data=coco8.yaml imgsz=640 epochs=100 batch=16
 """

+import gc
 import math
 import os
 import subprocess

@@ -437,6 +438,7 @@ class BaseTrainer:
 self.scheduler.last_epoch = self.epoch # do not move
 self.stop |= epoch >= self.epochs # stop if exceeded epochs
 self.run_callbacks("on_fit_epoch_end")
+gc.collect()
 torch.cuda.empty_cache() # clear GPU memory at end of epoch, may help reduce CUDA out of memory errors

 # Early Stopping

@@ -458,6 +460,7 @@ class BaseTrainer:
 if self.args.plots:
 self.plot_metrics()
 self.run_callbacks("on_train_end")
+gc.collect()
 torch.cuda.empty_cache()
 self.run_callbacks("teardown")

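On older releases, roughly the same per-epoch cleanup can be attached from user code through the callback system. The sketch below assumes the public add_callback API and the on_fit_epoch_end event behave as documented; it is illustrative, not a drop-in equivalent of the 8.2.2 change.

import gc

import torch
from ultralytics import YOLO


def clear_memory(trainer):
    """Free Python objects and cached CUDA memory after every fit epoch."""
    gc.collect()
    torch.cuda.empty_cache()


model = YOLO("yolov8n.pt")
model.add_callback("on_fit_epoch_end", clear_memory)  # runs at the end of each epoch
model.train(data="coco8.yaml", epochs=3)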
ultralytics/engine/validator.py
CHANGED
ultralytics/models/yolo/classify/val.py
CHANGED

@@ -56,8 +56,8 @@ class ClassificationValidator(BaseValidator):
 def update_metrics(self, preds, batch):
 """Updates running metrics with model predictions and batch targets."""
 n5 = min(len(self.names), 5)
-self.pred.append(preds.argsort(1, descending=True)[:, :n5])
-self.targets.append(batch["cls"])
+self.pred.append(preds.argsort(1, descending=True)[:, :n5].type(torch.int32).cpu())
+self.targets.append(batch["cls"].type(torch.int32).cpu())

 def finalize_metrics(self, *args, **kwargs):
 """Finalizes metrics of the model such as confusion_matrix and speed."""
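The change keeps only compact int32 copies of the top-5 indices and targets on the CPU while results accumulate over the whole validation set, instead of holding int64 tensors on the GPU. A tiny illustration with dummy scores (not ultralytics code):

import torch

preds = torch.rand(4, 10)                     # dummy class scores: batch of 4, 10 classes
names = {i: f"class_{i}" for i in range(10)}  # dummy class-name mapping

n5 = min(len(names), 5)
top5 = preds.argsort(1, descending=True)[:, :n5]  # top-5 class indices, int64, on preds' device
top5_small = top5.type(torch.int32).cpu()         # int32 on CPU: cheap to keep around while
                                                  # predictions accumulate across every batch
print(top5_small.shape, top5_small.dtype)         # torch.Size([4, 5]) torch.int32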
ultralytics/nn/autobackend.py
CHANGED
@@ -234,8 +234,11 @@ class AutoBackend(nn.Module):
 logger = trt.Logger(trt.Logger.INFO)
 # Read file
 with open(w, "rb") as f, trt.Runtime(logger) as runtime:
-meta_len = int.from_bytes(f.read(4), byteorder="little")  # read metadata length
-metadata = json.loads(f.read(meta_len).decode("utf-8"))  # read metadata
+try:
+    meta_len = int.from_bytes(f.read(4), byteorder="little")  # read metadata length
+    metadata = json.loads(f.read(meta_len).decode("utf-8"))  # read metadata
+except UnicodeDecodeError:
+    f.seek(0)  # engine file may lack embedded Ultralytics metadata
 model = runtime.deserialize_cuda_engine(f.read())  # read engine

 # Model context
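Ultralytics TensorRT exports prepend a small header to the .engine file: a 4-byte little-endian length followed by UTF-8 JSON metadata, with the serialized engine after it. The 8.2.2 change makes reading tolerant of engines built without that header. Below is a standalone sketch of the same framing using only the standard library; the helper names are hypothetical.

import json


def read_prefixed_metadata(path):
    """Read an optional '4-byte little-endian length + UTF-8 JSON' header.

    Returns (metadata, payload). If the file does not start with such a header,
    metadata is None and the whole file is returned as payload, mirroring the
    fallback added in 8.2.2 for engines without embedded metadata.
    """
    with open(path, "rb") as f:
        try:
            meta_len = int.from_bytes(f.read(4), byteorder="little")
            metadata = json.loads(f.read(meta_len).decode("utf-8"))
        except (UnicodeDecodeError, json.JSONDecodeError):
            f.seek(0)  # no header: rewind and treat everything as payload
            metadata = None
        payload = f.read()
    return metadata, payload


def write_prefixed_metadata(path, metadata, payload):
    """Write the same framing: length prefix, JSON metadata, then the payload."""
    meta = json.dumps(metadata).encode("utf-8")
    with open(path, "wb") as f:
        f.write(len(meta).to_bytes(4, byteorder="little") + meta + payload)

Writing a payload with write_prefixed_metadata and reading it back with read_prefixed_metadata round-trips the metadata dict; a raw file without the prefix simply comes back with metadata set to None.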
ultralytics/utils/__init__.py
CHANGED
@@ -61,7 +61,7 @@ HELP_MSG = """
 model = YOLO("yolov8n.pt") # load a pretrained model (recommended for training)

 # Use the model
-results = model.train(data="
+results = model.train(data="coco8.yaml", epochs=3) # train the model
 results = model.val() # evaluate model performance on the validation set
 results = model('https://ultralytics.com/images/bus.jpg') # predict on an image
 success = model.export(format='onnx') # export the model to ONNX format

@@ -78,13 +78,13 @@ HELP_MSG = """
 See all ARGS at https://docs.ultralytics.com/usage/cfg or with 'yolo cfg'

 - Train a detection model for 10 epochs with an initial learning_rate of 0.01
-yolo detect train data=
+yolo detect train data=coco8.yaml model=yolov8n.pt epochs=10 lr0=0.01

 - Predict a YouTube video using a pretrained segmentation model at image size 320:
 yolo segment predict model=yolov8n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320

 - Val a pretrained detection model at batch-size 1 and image size 640:
-yolo detect val model=yolov8n.pt data=
+yolo detect val model=yolov8n.pt data=coco8.yaml batch=1 imgsz=640

 - Export a YOLOv8n classification model to ONNX format at image size 224 by 128 (no TASK required)
 yolo export model=yolov8n-cls.pt format=onnx imgsz=224,128
ultralytics/utils/downloads.py
CHANGED
@@ -402,7 +402,7 @@ def get_github_assets(repo="ultralytics/assets", version="latest", retry=False):
 return data["tag_name"], [x["name"] for x in data["assets"]]  # tag, assets i.e. ['yolov8n.pt', 'yolov8s.pt', ...]


-def attempt_download_asset(file, repo="ultralytics/assets", release="v8.1.0", **kwargs):
+def attempt_download_asset(file, repo="ultralytics/assets", release="v8.2.0", **kwargs):
 """
 Attempt to download a file from GitHub release assets if it is not found locally. The function checks for the file
 locally first, then tries to download it from the specified GitHub repository release.

@@ -410,7 +410,7 @@ def attempt_download_asset(file, repo="ultralytics/assets", release="v8.1.0", **
 Args:
 file (str | Path): The filename or file path to be downloaded.
 repo (str, optional): The GitHub repository in the format 'owner/repo'. Defaults to 'ultralytics/assets'.
-release (str, optional): The specific release version to be downloaded. Defaults to 'v8.1.0'.
+release (str, optional): The specific release version to be downloaded. Defaults to 'v8.2.0'.
 **kwargs (any): Additional keyword arguments for the download process.

 Returns:
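With the default release bumped from v8.1.0 to v8.2.0, a bare call now resolves missing assets against the v8.2.0 release tag. A quick usage sketch; attempt_download_asset is the function touched by this diff, but the behaviour described in the comments is an assumption based on its docstring.

from ultralytics.utils.downloads import attempt_download_asset

# Fetches yolov8n.pt from the ultralytics/assets release if it is not already
# present locally and returns the local path; release now defaults to "v8.2.0".
weights = attempt_download_asset("yolov8n.pt")

# The previous default can still be requested explicitly.
legacy_weights = attempt_download_asset("yolov8n.pt", release="v8.1.0")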
ultralytics/utils/plotting.py
CHANGED
@@ -440,12 +440,9 @@ class Annotator:
 text_x = self.im.shape[1] - int(self.im.shape[1] * 0.025 + max_text_width)
 text_y = int(self.im.shape[0] * 0.025)

-# Calculate dynamic gap between each count value based on the width of the image
-dynamic_gap = max(1, self.im.shape[1] // 100) * tf
-
 for i, count in enumerate(counts):
 text_x_pos = text_x
-text_y_pos = text_y + i *
+text_y_pos = text_y + i * (max_text_height + 25 * tf)

 # Draw the border
 cv2.rectangle(

@@ -468,8 +465,6 @@
 lineType=cv2.LINE_AA,
 )

-text_y_pos += tf * max_text_height
-
 @staticmethod
 def estimate_pose_angle(a, b, c):
 """
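The removed dynamic_gap and in-loop increment are replaced by a single fixed per-row offset of max_text_height + 25 * tf, so each count label gets its own vertical slot. A toy calculation with made-up numbers:

tf = 2                 # text thickness used by the annotator
max_text_height = 30   # height in pixels of the tallest rendered label
text_y = 20            # top margin of the first label

# Vertical positions for three stacked count labels under the new formula
rows = [text_y + i * (max_text_height + 25 * tf) for i in range(3)]
print(rows)  # [20, 100, 180]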
ultralytics/utils/torch_utils.py
CHANGED
@@ -1,5 +1,6 @@
 # Ultralytics YOLO 🚀, AGPL-3.0 license

+import gc
 import math
 import os
 import random

@@ -581,6 +582,7 @@ def profile(input, ops, n=10, device=None):
 except Exception as e:
 LOGGER.info(e)
 results.append(None)
+gc.collect()  # attempt to free unused memory
 torch.cuda.empty_cache()
 return results

{ultralytics-8.2.0.dist-info → ultralytics-8.2.2.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ultralytics
-Version: 8.2.0
+Version: 8.2.2
 Summary: Ultralytics YOLOv8 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
 Author: Glenn Jocher, Ayush Chaurasia, Jing Qiu
 Maintainer: Glenn Jocher, Ayush Chaurasia, Jing Qiu

@@ -80,8 +80,8 @@ Requires-Dist: dvclive >=2.12.0 ; extra == 'logging'

 <div align="center">
 <p>
-<a href="https://
-<img width="100%" src="https://raw.githubusercontent.com/ultralytics/assets/main/
+<a href="https://github.com/ultralytics/assets/releases/tag/v8.2.0" target="_blank">
+<img width="100%" src="https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/banner-yolov8.png" alt="YOLO Vision banner"></a>
 </p>

 [中文](https://docs.ultralytics.com/zh/) | [한국어](https://docs.ultralytics.com/ko/) | [日本語](https://docs.ultralytics.com/ja/) | [Русский](https://docs.ultralytics.com/ru/) | [Deutsch](https://docs.ultralytics.com/de/) | [Français](https://docs.ultralytics.com/fr/) | [Español](https://docs.ultralytics.com/es/) | [Português](https://docs.ultralytics.com/pt/) | [हिन्दी](https://docs.ultralytics.com/hi/) | [العربية](https://docs.ultralytics.com/ar/) <br>

@@ -168,7 +168,7 @@ model = YOLO("yolov8n.yaml") # build a new model from scratch
 model = YOLO("yolov8n.pt") # load a pretrained model (recommended for training)

 # Use the model
-model.train(data="
+model.train(data="coco8.yaml", epochs=3) # train the model
 metrics = model.val() # evaluate model performance on the validation set
 results = model("https://ultralytics.com/images/bus.jpg") # predict on an image
 path = model.export(format="onnx") # export the model to ONNX format

@@ -205,11 +205,11 @@ See [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examp

 | Model | size<br><sup>(pixels) | mAP<sup>val<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>A100 TensorRT<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | ------------------------------------------------------------------------------------ | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v8.
+| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8n.pt) | 640 | 37.3 | 80.4 | 0.99 | 3.2 | 8.7 |
+| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8s.pt) | 640 | 44.9 | 128.4 | 1.20 | 11.2 | 28.6 |
+| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8m.pt) | 640 | 50.2 | 234.7 | 1.83 | 25.9 | 78.9 |
+| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8l.pt) | 640 | 52.9 | 375.2 | 2.39 | 43.7 | 165.2 |
+| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8x.pt) | 640 | 53.9 | 479.1 | 3.53 | 68.2 | 257.8 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on [COCO val2017](https://cocodataset.org) dataset. <br>Reproduce by `yolo val detect data=coco.yaml device=0`
 - **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val detect data=coco.yaml batch=1 device=0|cpu`

@@ -222,11 +222,11 @@ See [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examp

 | Model | size<br><sup>(pixels) | mAP<sup>val<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>A100 TensorRT<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | ----------------------------------------------------------------------------------------- | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v8.
+| [YOLOv8n](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8n-oiv7.pt) | 640 | 18.4 | 142.4 | 1.21 | 3.5 | 10.5 |
+| [YOLOv8s](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8s-oiv7.pt) | 640 | 27.7 | 183.1 | 1.40 | 11.4 | 29.7 |
+| [YOLOv8m](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8m-oiv7.pt) | 640 | 33.6 | 408.5 | 2.26 | 26.2 | 80.6 |
+| [YOLOv8l](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8l-oiv7.pt) | 640 | 34.9 | 596.9 | 2.43 | 44.1 | 167.4 |
+| [YOLOv8x](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8x-oiv7.pt) | 640 | 36.3 | 860.6 | 3.56 | 68.7 | 260.6 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on [Open Image V7](https://docs.ultralytics.com/datasets/detect/open-images-v7/) dataset. <br>Reproduce by `yolo val detect data=open-images-v7.yaml device=0`
 - **Speed** averaged over Open Image V7 val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val detect data=open-images-v7.yaml batch=1 device=0|cpu`

@@ -239,11 +239,11 @@ See [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage e

 | Model | size<br><sup>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mask<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>A100 TensorRT<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | -------------------------------------------------------------------------------------------- | --------------------- | -------------------- | --------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v8.
+| [YOLOv8n-seg](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8n-seg.pt) | 640 | 36.7 | 30.5 | 96.1 | 1.21 | 3.4 | 12.6 |
+| [YOLOv8s-seg](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8s-seg.pt) | 640 | 44.6 | 36.8 | 155.7 | 1.47 | 11.8 | 42.6 |
+| [YOLOv8m-seg](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8m-seg.pt) | 640 | 49.9 | 40.8 | 317.0 | 2.18 | 27.3 | 110.2 |
+| [YOLOv8l-seg](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8l-seg.pt) | 640 | 52.3 | 42.6 | 572.4 | 2.79 | 46.0 | 220.5 |
+| [YOLOv8x-seg](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8x-seg.pt) | 640 | 53.4 | 43.4 | 712.1 | 4.02 | 71.8 | 344.1 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on [COCO val2017](https://cocodataset.org) dataset. <br>Reproduce by `yolo val segment data=coco-seg.yaml device=0`
 - **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val segment data=coco-seg.yaml batch=1 device=0|cpu`

@@ -256,12 +256,12 @@ See [Pose Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples wit

 | Model | size<br><sup>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>A100 TensorRT<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | ---------------------------------------------------------------------------------------------------- | --------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v8.
+| [YOLOv8n-pose](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8n-pose.pt) | 640 | 50.4 | 80.1 | 131.8 | 1.18 | 3.3 | 9.2 |
+| [YOLOv8s-pose](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8s-pose.pt) | 640 | 60.0 | 86.2 | 233.2 | 1.42 | 11.6 | 30.2 |
+| [YOLOv8m-pose](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8m-pose.pt) | 640 | 65.0 | 88.8 | 456.3 | 2.00 | 26.4 | 81.0 |
+| [YOLOv8l-pose](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8l-pose.pt) | 640 | 67.6 | 90.0 | 784.5 | 2.59 | 44.4 | 168.6 |
+| [YOLOv8x-pose](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8x-pose.pt) | 640 | 69.2 | 90.2 | 1607.1 | 3.73 | 69.4 | 263.2 |
+| [YOLOv8x-pose-p6](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8x-pose-p6.pt) | 1280 | 71.6 | 91.2 | 4088.7 | 10.04 | 99.1 | 1066.4 |

 - **mAP<sup>val</sup>** values are for single-model single-scale on [COCO Keypoints val2017](https://cocodataset.org) dataset. <br>Reproduce by `yolo val pose data=coco-pose.yaml device=0`
 - **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu`

@@ -274,11 +274,11 @@ See [OBB Docs](https://docs.ultralytics.com/tasks/obb/) for usage examples with

 | Model | size<br><sup>(pixels) | mAP<sup>test<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>A100 TensorRT<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
 | -------------------------------------------------------------------------------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [YOLOv8n-obb](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8s-obb](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8m-obb](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8l-obb](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8x-obb](https://github.com/ultralytics/assets/releases/download/v8.
+| [YOLOv8n-obb](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8n-obb.pt) | 1024 | 78.0 | 204.77 | 3.57 | 3.1 | 23.3 |
+| [YOLOv8s-obb](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8s-obb.pt) | 1024 | 79.5 | 424.88 | 4.07 | 11.4 | 76.3 |
+| [YOLOv8m-obb](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8m-obb.pt) | 1024 | 80.5 | 763.48 | 7.61 | 26.4 | 208.6 |
+| [YOLOv8l-obb](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8l-obb.pt) | 1024 | 80.7 | 1278.42 | 11.83 | 44.5 | 433.8 |
+| [YOLOv8x-obb](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8x-obb.pt) | 1024 | 81.36 | 1759.10 | 13.23 | 69.5 | 676.7 |

 - **mAP<sup>test</sup>** values are for single-model multiscale on [DOTAv1](https://captain-whu.github.io/DOTA/index.html) dataset. <br>Reproduce by `yolo val obb data=DOTAv1.yaml device=0 split=test` and submit merged results to [DOTA evaluation](https://captain-whu.github.io/DOTA/evaluation.html).
 - **Speed** averaged over DOTAv1 val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val obb data=DOTAv1.yaml batch=1 device=0|cpu`

@@ -291,11 +291,11 @@ See [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usag

 | Model | size<br><sup>(pixels) | acc<br><sup>top1 | acc<br><sup>top5 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>A100 TensorRT<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) at 640 |
 | -------------------------------------------------------------------------------------------- | --------------------- | ---------------- | ---------------- | ------------------------------ | ----------------------------------- | ------------------ | ------------------------ |
-| [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v8.
-| [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v8.
+| [YOLOv8n-cls](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8n-cls.pt) | 224 | 69.0 | 88.3 | 12.9 | 0.31 | 2.7 | 4.3 |
+| [YOLOv8s-cls](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8s-cls.pt) | 224 | 73.8 | 91.7 | 23.4 | 0.35 | 6.4 | 13.5 |
+| [YOLOv8m-cls](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8m-cls.pt) | 224 | 76.8 | 93.5 | 85.4 | 0.62 | 17.0 | 42.7 |
+| [YOLOv8l-cls](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8l-cls.pt) | 224 | 76.8 | 93.5 | 163.0 | 0.87 | 37.5 | 99.7 |
+| [YOLOv8x-cls](https://github.com/ultralytics/assets/releases/download/v8.2.0/yolov8x-cls.pt) | 224 | 79.0 | 94.6 | 232.0 | 1.01 | 57.4 | 154.8 |

 - **acc** values are model accuracies on the [ImageNet](https://www.image-net.org/) dataset validation set. <br>Reproduce by `yolo val classify data=path/to/ImageNet device=0`
 - **Speed** averaged over ImageNet val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu`
{ultralytics-8.2.0.dist-info → ultralytics-8.2.2.dist-info}/RECORD
CHANGED

@@ -1,8 +1,8 @@
-ultralytics/__init__.py,sha256=
+ultralytics/__init__.py,sha256=cL4PVaHbKje8it1MXrZE9VsBIJTpuATEUI5p2I7YuEo,632
 ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
 ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
-ultralytics/cfg/__init__.py,sha256=
-ultralytics/cfg/default.yaml,sha256=
+ultralytics/cfg/__init__.py,sha256=4ZnvY2ULMGofFhjaRIzKQlGC5YVkvWkEAYAhnsKC1Po,21312
+ultralytics/cfg/default.yaml,sha256=KoXq5DHQK-Voge9DbkySd2rRpDizG6Oq-A4Byqz5Exc,8211
 ultralytics/cfg/datasets/Argoverse.yaml,sha256=FyeuJT5CHq_9d4hlfAf0kpZlnbUMO0S--UJ1yIqcdKk,3134
 ultralytics/cfg/datasets/DOTAv1.5.yaml,sha256=YDsyFPI6F6-OQXLBM3hOXo3vADYREwZzmMQfJNdpWyM,1193
 ultralytics/cfg/datasets/DOTAv1.yaml,sha256=dxLUliHvJOW4q4vJRu5qIYVvNfjvXWB7GVh_Fhk--dM,1163

@@ -78,13 +78,13 @@ ultralytics/data/explorer/utils.py,sha256=EvvukQiQUTBrsZznmMnyEX2EqTuwZo_Geyc8yf
 ultralytics/data/explorer/gui/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
 ultralytics/data/explorer/gui/dash.py,sha256=2oAbNroR2lfS45v53M1sRqZklLXbbj6qXqNxvplulC0,10087
 ultralytics/engine/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
-ultralytics/engine/exporter.py,sha256=
+ultralytics/engine/exporter.py,sha256=KW3PwxgzlNBj44oiYAKT4PVS4uexLZT5H8Qevc2Q8qg,54507
 ultralytics/engine/model.py,sha256=4zSVSBP8Ex49bJjnOXm7g3Qr_NgbplHPCjdnVfZwfxM,40019
 ultralytics/engine/predictor.py,sha256=wQRKdWGDTP5A6CS0gTC6U3RPDMhP3QkEzWSPm6eqCkU,17022
 ultralytics/engine/results.py,sha256=MvrOBrBlRF7kbL-QwysMf9mIDy_lwQBTTYvy1x1FMME,30667
-ultralytics/engine/trainer.py,sha256=
+ultralytics/engine/trainer.py,sha256=FK2PkQyUThIU5RYr8Qa38JZDRB3iOl85Sdbi4HrlQ5U,34987
 ultralytics/engine/tuner.py,sha256=iZrgMmXSDpfuDu4bdFRflmAsscys2-8W8qAGxSyOVJE,11844
-ultralytics/engine/validator.py,sha256=
+ultralytics/engine/validator.py,sha256=Y21Uo8_Zto4qjk_YqQk6k7tyfpq_Qk9cfjeXeyDRxs8,14643
 ultralytics/hub/__init__.py,sha256=U4j-2QPdwSDlxw6RgFYnnJXOoIzLtwke4TkY2A8q4ws,5068
 ultralytics/hub/auth.py,sha256=FID58NE6fh7Op_B45QOpWBw1qoBN0ponL16uvyb2dZ8,5399
 ultralytics/hub/session.py,sha256=Oly3bKjLkW08iOm3QoSr6Yy57aLZ4AmAmF6Pp9Y_q5g,15197

@@ -124,7 +124,7 @@ ultralytics/models/yolo/model.py,sha256=EwjRD9QrLP7qxqqjj-Q1II4RdjTZTyssn_n1iwO6
 ultralytics/models/yolo/classify/__init__.py,sha256=t-4pUHmgI2gjhc-l3bqNEcEtKD1dO40nD4Vc6Y2xD6o,355
 ultralytics/models/yolo/classify/predict.py,sha256=wFY4GIlWxe7idMndEw1RnDI63o53MTfiHKz0s2fOjAY,2513
 ultralytics/models/yolo/classify/train.py,sha256=9CRqtLkePo4ZkAzMTxDY4ztrNaWE34qnytYymfCEBzs,6888
-ultralytics/models/yolo/classify/val.py,sha256=
+ultralytics/models/yolo/classify/val.py,sha256=MXdtWrBYVpfFuPfFPOTLKa_wBdTIA4dBZguT-EtldZ4,4909
 ultralytics/models/yolo/detect/__init__.py,sha256=JR8gZJWn7wMBbh-0j_073nxJVZTMFZVWTOG5Wnvk6w0,229
 ultralytics/models/yolo/detect/predict.py,sha256=_a9vH3DmKFY6eeztFTdj3nkfu_MKG6n7zb5rRKGjs9I,1510
 ultralytics/models/yolo/detect/train.py,sha256=8Ulq1SPNLrkOqXj0Yt5zNR1c_Xl_QnOjllCdqBHUMds,6353

@@ -145,7 +145,7 @@ ultralytics/models/yolo/world/__init__.py,sha256=3VTH0q4NOt2EWRom15yCymvmvm0Etp2
 ultralytics/models/yolo/world/train.py,sha256=acYN2-onL69LrL4av6_hY2r5AY0urC0WViDstn7npfI,3686
 ultralytics/models/yolo/world/train_world.py,sha256=ICPsYNbuPkq_qf3FHl2YJ-q3g7ik0pI-zhMpLmHa5-4,4805
 ultralytics/nn/__init__.py,sha256=4BPLHY89xEM_al5uK0aOmFgiML6CMGEZbezxOvTjOEs,587
-ultralytics/nn/autobackend.py,sha256=
+ultralytics/nn/autobackend.py,sha256=6amaXnbDlvh0kTIbeHV3kIM6X7P1r0T3le1GPxIgkOs,30864
 ultralytics/nn/tasks.py,sha256=a3FSkIUErlE7qI506ye5vGggqzMxqXWDkIbbLD4AGyI,43623
 ultralytics/nn/modules/__init__.py,sha256=KzLoyn2ldfReiQL8H8xsMC49Xvtb8Kv9ikE5Q3OBoAs,2326
 ultralytics/nn/modules/block.py,sha256=smIz3oNTDA7UKrAH5FfSMh08C12-avgWTeIkbgZIv18,25251

@@ -169,12 +169,12 @@ ultralytics/trackers/utils/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7J
 ultralytics/trackers/utils/gmc.py,sha256=vwcPA1n5zjPaBGhCDt8ItN7rq_6Sczsjn4gsXJfRylU,13688
 ultralytics/trackers/utils/kalman_filter.py,sha256=0oqhk59NKEiwcJ2FXnw6_sT4bIFC6Wu5IY2B-TGxJKU,15168
 ultralytics/trackers/utils/matching.py,sha256=UxhSGa5pN6WoYwYSBAkkt-O7xMxUR47VuUB6PfVNkb4,5404
-ultralytics/utils/__init__.py,sha256=
+ultralytics/utils/__init__.py,sha256=BdmRL2UhbmzmWuhaB1iDUTOyQ3fTwOrB0aUijAgpOUg,39286
 ultralytics/utils/autobatch.py,sha256=ygZ3f2ByIkcujB89ENcTnGWWnAQw5Pbg6nBuShg-5t4,3863
 ultralytics/utils/benchmarks.py,sha256=dVAQ7GjZmgjvGL9JglKA3d9HAnvGoyX2TaEmZJjk0HA,18539
 ultralytics/utils/checks.py,sha256=UDrcHiTMjSHSyUZflTRGuyYRj0uz9-RQ-xfDq_lsXZo,27971
 ultralytics/utils/dist.py,sha256=3HeNbY2gp7vYhcvVhsrvTrQXpQmgT8tpmnzApf3eQRA,2267
-ultralytics/utils/downloads.py,sha256=
+ultralytics/utils/downloads.py,sha256=Rx32imHkKyVltEDMiCtCT2N5aA9Cud_0PyIUoTh4ru0,21496
 ultralytics/utils/errors.py,sha256=GqP_Jgj_n0paxn8OMhn3DTCgoNkB2WjUcUaqs-M6SQk,816
 ultralytics/utils/files.py,sha256=TVfY0Wi5IsUc4YdsDzC0dAg-jAP5exYvwqB3VmXhDLY,6761
 ultralytics/utils/instance.py,sha256=fPClvPPtTk8VeXWiRv90DrFk1j1lTUKdYJtpZKUDDtA,15575

@@ -182,9 +182,9 @@ ultralytics/utils/loss.py,sha256=ejXnPEIAzNEoNz2UjW0_fcdeUs9Hy-jPzUrJ3FiIIwE,327
 ultralytics/utils/metrics.py,sha256=XPD-xP0fchR8KgCuTcihV2-n0EK1cWi3-53BWN_pLuA,53518
 ultralytics/utils/ops.py,sha256=wZCWx7dm5GJNIJHyZaFJRetGcQ7prdv-anplqq9figQ,33309
 ultralytics/utils/patches.py,sha256=SgMqeMsq2K6JoBJP1NplXMl9C6rK0JeJUChjBrJOneo,2750
-ultralytics/utils/plotting.py,sha256=
+ultralytics/utils/plotting.py,sha256=bmuQIlH8wJRp9ASRmVfiXJrr4iDwEPTS_8WniCzyqVc,47332
 ultralytics/utils/tal.py,sha256=xuIyryUjaaYHkHPG9GvBwh1xxN2Hq4y3hXOtuERehwY,16017
-ultralytics/utils/torch_utils.py,sha256
+ultralytics/utils/torch_utils.py,sha256=y1qJniyii0sJFg8dpP-yjYh8AMOoFok9NEZcRi669Jo,25916
 ultralytics/utils/triton.py,sha256=gg1finxno_tY2Ge9PMhmu7PI9wvoFZoiicdT4Bhqv3w,3936
 ultralytics/utils/tuner.py,sha256=JhvBp6haKA6eqpNPpGJzzjjCmPxBx5phk9kHmt_jppw,6171
 ultralytics/utils/callbacks/__init__.py,sha256=YrWqC3BVVaTLob4iCPR6I36mUxIUOpPJW7B_LjT78Qw,214

@@ -198,9 +198,9 @@ ultralytics/utils/callbacks/neptune.py,sha256=5Z3ua5YBTUS56FH8VQKQG1aaIo9fH8GEyz
 ultralytics/utils/callbacks/raytune.py,sha256=ODVYzy-CoM4Uge0zjkh3Hnh9nF2M0vhDrSenXnvcizw,705
 ultralytics/utils/callbacks/tensorboard.py,sha256=Z1veCVcn9THPhdplWuIzwlsW2yF7y-On9IZIk3khM0Y,4135
 ultralytics/utils/callbacks/wb.py,sha256=woCQVuZzqtM5KnwxIibcfM3sFBYojeMPnv11jrRaIQA,6674
-ultralytics-8.2.
-ultralytics-8.2.
-ultralytics-8.2.
-ultralytics-8.2.
-ultralytics-8.2.
-ultralytics-8.2.
+ultralytics-8.2.2.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ultralytics-8.2.2.dist-info/METADATA,sha256=CnxRE4dEVOgykApuwfT8TswCppWNigG1efP-1Ut3Ptc,40448
+ultralytics-8.2.2.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ultralytics-8.2.2.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ultralytics-8.2.2.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ultralytics-8.2.2.dist-info/RECORD,,
{ultralytics-8.2.0.dist-info → ultralytics-8.2.2.dist-info}/LICENSE
File without changes
{ultralytics-8.2.0.dist-info → ultralytics-8.2.2.dist-info}/WHEEL
File without changes
{ultralytics-8.2.0.dist-info → ultralytics-8.2.2.dist-info}/entry_points.txt
File without changes
{ultralytics-8.2.0.dist-info → ultralytics-8.2.2.dist-info}/top_level.txt
File without changes