ultralytics 8.3.2__py3-none-any.whl → 8.3.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.

ultralytics/__init__.py CHANGED
@@ -1,6 +1,6 @@
  # Ultralytics YOLO 🚀, AGPL-3.0 license

- __version__ = "8.3.2"
+ __version__ = "8.3.3"

  import os

ultralytics/hub/utils.py CHANGED
@@ -170,7 +170,7 @@ def smart_request(method, url, retry=3, timeout=30, thread=True, code=-1, verbos
  class Events:
      """
      A class for collecting anonymous event analytics. Event analytics are enabled when sync=True in settings and
- disabled when sync=False. Run 'yolo settings' to see and update settings YAML file.
+ disabled when sync=False. Run 'yolo settings' to see and update settings.

      Attributes:
          url (str): The URL to send anonymous events.
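The docstring above drops the "YAML file" wording; settings are managed by the JSON-backed SettingsManager (class SettingsManager(JSONDict), shown further down in this diff). For reference, a minimal sketch of viewing and toggling the sync flag that gates these analytics, using the settings object documented by Ultralytics (illustrative only, not part of this diff):

from ultralytics import settings

print(settings["sync"])           # True by default; enables anonymous events and crash reporting
settings.update({"sync": False})  # opt out of event analytics
settings.reset()                  # restore the defaults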
ultralytics/solutions/streamlit_inference.py CHANGED
@@ -23,13 +23,13 @@ def inference(model=None):
  # Main title of streamlit application
  main_title_cfg = """<div><h1 style="color:#FF64DA; text-align:center; font-size:40px;
  font-family: 'Archivo', sans-serif; margin-top:-50px;margin-bottom:20px;">
- Ultralytics YOLOv8 Streamlit Application
+ Ultralytics YOLO Streamlit Application
  </h1></div>"""

  # Subtitle of streamlit application
  sub_title_cfg = """<div><h4 style="color:#042AFF; text-align:center;
  font-family: 'Archivo', sans-serif; margin-top:-15px; margin-bottom:50px;">
- Experience real-time object detection on your webcam with the power of Ultralytics YOLOv8! 🚀</h4>
+ Experience real-time object detection on your webcam with the power of Ultralytics YOLO! 🚀</h4>
  </div>"""

  # Set html page configuration
@@ -67,7 +67,7 @@ def inference(model=None):
  vid_file_name = 0

  # Add dropdown menu for model selection
- available_models = [x.replace("yolo", "YOLO") for x in GITHUB_ASSETS_STEMS if x.startswith("yolov8")]
+ available_models = [x.replace("yolo", "YOLO") for x in GITHUB_ASSETS_STEMS if x.startswith("yolo11")]
  if model:
      available_models.insert(0, model.split(".pt")[0])  # insert model without suffix as *.pt is added later
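The dropdown filter now selects YOLO11 checkpoints instead of YOLOv8 ones. A small sketch of the same list comprehension against an assumed subset of GITHUB_ASSETS_STEMS:

GITHUB_ASSETS_STEMS = ["yolov8n", "yolov8s", "yolo11n", "yolo11s", "yolo11n-seg"]  # assumed subset for illustration

available_models = [x.replace("yolo", "YOLO") for x in GITHUB_ASSETS_STEMS if x.startswith("yolo11")]
print(available_models)  # ['YOLO11n', 'YOLO11s', 'YOLO11n-seg']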
ultralytics/utils/__init__.py CHANGED
@@ -971,7 +971,7 @@ def threaded(func):
  def set_sentry():
      """
      Initialize the Sentry SDK for error tracking and reporting. Only used if sentry_sdk package is installed and
- sync=True in settings. Run 'yolo settings' to see and update settings YAML file.
+ sync=True in settings. Run 'yolo settings' to see and update settings.

      Conditions required to send errors (ALL conditions must be met or no errors will be reported):
      - sentry_sdk package is installed
@@ -983,36 +983,11 @@ def set_sentry():
      - online environment
      - CLI used to run package (checked with 'yolo' as the name of the main CLI command)

- The function also configures Sentry SDK to ignore KeyboardInterrupt and FileNotFoundError
- exceptions and to exclude events with 'out of memory' in their exception message.
+ The function also configures Sentry SDK to ignore KeyboardInterrupt and FileNotFoundError exceptions and to exclude
+ events with 'out of memory' in their exception message.

      Additionally, the function sets custom tags and user information for Sentry events.
      """
-
- def before_send(event, hint):
- """
- Modify the event before sending it to Sentry based on specific exception types and messages.
-
- Args:
- event (dict): The event dictionary containing information about the error.
- hint (dict): A dictionary containing additional information about the error.
-
- Returns:
- dict: The modified event or None if the event should not be sent to Sentry.
- """
- if "exc_info" in hint:
- exc_type, exc_value, tb = hint["exc_info"]
- if exc_type in {KeyboardInterrupt, FileNotFoundError} or "out of memory" in str(exc_value):
- return None # do not send event
-
- event["tags"] = {
- "sys_argv": ARGV[0],
- "sys_argv_name": Path(ARGV[0]).name,
- "install": "git" if IS_GIT_DIR else "pip" if IS_PIP_PACKAGE else "other",
- "os": ENVIRONMENT,
- }
- return event
-
  if (
      SETTINGS["sync"]
      and RANK in {-1, 0}
@@ -1028,8 +1003,32 @@ def set_sentry():
  except ImportError:
      return

+ def before_send(event, hint):
+ """
+ Modify the event before sending it to Sentry based on specific exception types and messages.
+
+ Args:
+ event (dict): The event dictionary containing information about the error.
+ hint (dict): A dictionary containing additional information about the error.
+
+ Returns:
+ dict: The modified event or None if the event should not be sent to Sentry.
+ """
+ if "exc_info" in hint:
+ exc_type, exc_value, _ = hint["exc_info"]
+ if exc_type in {KeyboardInterrupt, FileNotFoundError} or "out of memory" in str(exc_value):
+ return None # do not send event
+
+ event["tags"] = {
+ "sys_argv": ARGV[0],
+ "sys_argv_name": Path(ARGV[0]).name,
+ "install": "git" if IS_GIT_DIR else "pip" if IS_PIP_PACKAGE else "other",
+ "os": ENVIRONMENT,
+ }
+ return event
+
  sentry_sdk.init(
- dsn="https://5ff1556b71594bfea135ff0203a0d290@o4504521589325824.ingest.sentry.io/4504521592406016",
+ dsn="https://888e5a0778212e1d0314c37d4b9aae5d@o4504521589325824.ingest.us.sentry.io/4504521592406016",
  debug=False,
  traces_sample_rate=1.0,
  release=__version__,
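In this release the before_send hook is defined only after the sentry_sdk import succeeds and is then passed to sentry_sdk.init. A standalone sketch of the same pattern with a placeholder DSN (the hook and init call use the real sentry_sdk API; the DSN below is not the project's):

import sentry_sdk


def before_send(event, hint):
    """Drop events for user interrupts, missing files, and out-of-memory errors."""
    if "exc_info" in hint:
        exc_type, exc_value, _ = hint["exc_info"]
        if exc_type in {KeyboardInterrupt, FileNotFoundError} or "out of memory" in str(exc_value):
            return None  # returning None suppresses the event
    return event


sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    before_send=before_send,
    traces_sample_rate=1.0,
)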
@@ -1170,25 +1169,26 @@ class SettingsManager(JSONDict):
  self.file = Path(file)
  self.version = version
  self.defaults = {
- "settings_version": version,
- "datasets_dir": str(datasets_root / "datasets"),
- "weights_dir": str(root / "weights"),
- "runs_dir": str(root / "runs"),
- "uuid": hashlib.sha256(str(uuid.getnode()).encode()).hexdigest(),
- "sync": True,
- "api_key": "",
- "openai_api_key": "",
- "clearml": True, # integrations
- "comet": True,
- "dvc": True,
- "hub": True,
- "mlflow": True,
- "neptune": True,
- "raytune": True,
- "tensorboard": True,
- "wandb": True,
- "vscode_msg": True,
+ "settings_version": version, # Settings schema version
+ "datasets_dir": str(datasets_root / "datasets"), # Datasets directory
+ "weights_dir": str(root / "weights"), # Model weights directory
+ "runs_dir": str(root / "runs"), # Experiment runs directory
+ "uuid": hashlib.sha256(str(uuid.getnode()).encode()).hexdigest(), # SHA-256 anonymized UUID hash
+ "sync": True, # Enable synchronization
+ "api_key": "", # Ultralytics API Key
+ "openai_api_key": "", # OpenAI API Key
+ "clearml": True, # ClearML integration
+ "comet": True, # Comet integration
+ "dvc": True, # DVC integration
+ "hub": True, # Ultralytics HUB integration
+ "mlflow": True, # MLflow integration
+ "neptune": True, # Neptune integration
+ "raytune": True, # Ray Tune integration
+ "tensorboard": True, # TensorBoard logging
+ "wandb": True, # Weights & Biases logging
+ "vscode_msg": True, # VSCode messaging
  }
+
  self.help_msg = (
  f"\nView Ultralytics Settings with 'yolo settings' or at '{self.file}'"
  "\nUpdate Settings with 'yolo settings key=value', i.e. 'yolo settings runs_dir=path/to/dir'. "
ultralytics/utils/benchmarks.py CHANGED
@@ -536,8 +536,8 @@ class ProfileModels:
  """Generates a table row string with model performance metrics including inference times and model details."""
  layers, params, gradients, flops = model_info
  return (
- f"| {model_name:18s} | {self.imgsz} | - | {t_onnx[0]:.2f} ± {t_onnx[1]:.2f} ms | {t_engine[0]:.2f} ± "
- f"{t_engine[1]:.2f} ms | {params / 1e6:.1f} | {flops:.1f} |"
+ f"| {model_name:18s} | {self.imgsz} | - | {t_onnx[0]:.1f}±{t_onnx[1]:.1f} ms | {t_engine[0]:.1f}±"
+ f"{t_engine[1]:.1f} ms | {params / 1e6:.1f} | {flops:.1f} |"
  )

  @staticmethod
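The table-row formatter now prints one decimal place with no spaces around ±, matching the updated benchmark tables in the README below. A quick sketch with assumed example values:

model_name, imgsz = "yolo11n", 640                # assumed example values
t_onnx, t_engine = (56.12, 0.82), (1.84, 0.02)    # (mean, std) inference times in milliseconds
params, flops = 2.6e6, 6.5

row = (
    f"| {model_name:18s} | {imgsz} | - | {t_onnx[0]:.1f}±{t_onnx[1]:.1f} ms | {t_engine[0]:.1f}±"
    f"{t_engine[1]:.1f} ms | {params / 1e6:.1f} | {flops:.1f} |"
)
print(row)  # ... | 56.1±0.8 ms | 1.8±0.0 ms | 2.6 | 6.5 |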
ultralytics/utils/plotting.py CHANGED
@@ -591,8 +591,8 @@ class Annotator:
  Args:
      label (str): queue counts label
      points (tuple): region points for center point calculation to display text
- region_color (RGB): queue region color
- txt_color (RGB): text display color
+ region_color (tuple): RGB queue region color.
+ txt_color (tuple): RGB text display color.
  """
  x_values = [point[0] for point in points]
  y_values = [point[1] for point in points]
@@ -631,8 +631,8 @@ class Annotator:
  Args:
      im0 (ndarray): inference image
      text (str): object/class name
- txt_color (bgr color): display color for text foreground
- bg_color (bgr color): display color for text background
+ txt_color (tuple): display color for text foreground
+ bg_color (tuple): display color for text background
      x_center (float): x position center point for bounding box
      y_center (float): y position center point for bounding box
      margin (int): gap between text and rectangle for better display
@@ -655,8 +655,8 @@ class Annotator:
  Args:
      im0 (ndarray): inference image
      text (dict): labels dictionary
- txt_color (bgr color): display color for text foreground
- bg_color (bgr color): display color for text background
+ txt_color (tuple): display color for text foreground
+ bg_color (tuple): display color for text background
      margin (int): gap between text and rectangle for better display
  """
  horizontal_gap = int(im0.shape[1] * 0.02)
@@ -805,11 +805,14 @@ class Annotator:
  Function for drawing segmented object in bounding box shape.

  Args:
- mask (list): masks data list for instance segmentation area plotting
- mask_color (RGB): mask foreground color
- label (str): Detection label text
- txt_color (RGB): text color
+ mask (np.ndarray): A 2D array of shape (N, 2) containing the contour points of the segmented object.
+ mask_color (tuple): RGB color for the contour and label background.
+ label (str, optional): Text label for the object. If None, no label is drawn.
+ txt_color (tuple): RGB color for the label text.
  """
+ if mask.size == 0: # no masks to plot
+     return
+
  cv2.polylines(self.im, [np.int32([mask])], isClosed=True, color=mask_color, thickness=2)
  text_size, _ = cv2.getTextSize(label, 0, self.sf, self.tf)
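The new guard makes an empty contour array a no-op before cv2.polylines is reached. A minimal standalone sketch of the same check (image and mask shapes assumed):

import cv2
import numpy as np

im = np.zeros((480, 640, 3), dtype=np.uint8)  # assumed blank canvas
mask = np.empty((0, 2), dtype=np.float32)     # a detection with no contour points

if mask.size == 0:  # early return: nothing to draw or label
    print("empty mask, skipping plot")
else:
    cv2.polylines(im, [np.int32([mask])], isClosed=True, color=(255, 0, 255), thickness=2)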
@@ -833,8 +836,8 @@ class Annotator:
  Args:
      pixels_distance (float): Pixels distance between two bbox centroids.
      centroids (list): Bounding box centroids data.
- line_color (RGB): Distance line color.
- centroid_color (RGB): Bounding box centroid color.
+ line_color (tuple): RGB distance line color.
+ centroid_color (tuple): RGB bounding box centroid color.
  """
  # Get the text size
  (text_width_m, text_height_m), _ = cv2.getTextSize(
ultralytics-8.3.3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ultralytics
- Version: 8.3.2
+ Version: 8.3.3
  Summary: Ultralytics YOLO for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
  Author: Ayush Chaurasia
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
@@ -214,11 +214,11 @@ See [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examp

  | Model | size<br><sup>(pixels) | mAP<sup>val<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
  | ------------------------------------------------------------------------------------ | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
- | [YOLO11n](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n.pt) | 640 | 39.5 | 56.12 ± 0.82 ms | 1.55 ± 0.01 ms | 2.6 | 6.5 |
- | [YOLO11s](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s.pt) | 640 | 47.0 | 90.01 ± 1.17 ms | 2.46 ± 0.00 ms | 9.4 | 21.5 |
- | [YOLO11m](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m.pt) | 640 | 51.5 | 183.20 ± 2.04 ms | 4.70 ± 0.06 ms | 20.1 | 68.0 |
- | [YOLO11l](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l.pt) | 640 | 53.4 | 238.64 ± 1.39 ms | 6.16 ± 0.08 ms | 25.3 | 86.9 |
- | [YOLO11x](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x.pt) | 640 | 54.7 | 462.78 ± 6.66 ms | 11.31 ± 0.24 ms | 56.9 | 194.9 |
+ | [YOLO11n](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n.pt) | 640 | 39.5 | 56.1 ± 0.8 | 1.5 ± 0.0 | 2.6 | 6.5 |
+ | [YOLO11s](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s.pt) | 640 | 47.0 | 90.0 ± 1.2 | 2.5 ± 0.0 | 9.4 | 21.5 |
+ | [YOLO11m](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m.pt) | 640 | 51.5 | 183.2 ± 2.0 | 4.7 ± 0.1 | 20.1 | 68.0 |
+ | [YOLO11l](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l.pt) | 640 | 53.4 | 238.6 ± 1.4 | 6.2 ± 0.1 | 25.3 | 86.9 |
+ | [YOLO11x](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x.pt) | 640 | 54.7 | 462.8 ± 6.7 | 11.3 ± 0.2 | 56.9 | 194.9 |

  - **mAP<sup>val</sup>** values are for single-model single-scale on [COCO val2017](https://cocodataset.org/) dataset. <br>Reproduce by `yolo val detect data=coco.yaml device=0`
  - **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val detect data=coco.yaml batch=1 device=0|cpu`
@@ -231,28 +231,45 @@ See [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage e

  | Model | size<br><sup>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mask<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
  | -------------------------------------------------------------------------------------------- | --------------------- | -------------------- | --------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
- | [YOLO11n-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n-seg.pt) | 640 | 38.9 | 32.0 | 65.90 ± 1.14 ms | 1.84 ± 0.00 ms | 2.9 | 10.4 |
- | [YOLO11s-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s-seg.pt) | 640 | 46.6 | 37.8 | 117.56 ± 4.89 ms | 2.94 ± 0.01 ms | 10.1 | 35.5 |
- | [YOLO11m-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m-seg.pt) | 640 | 51.5 | 41.5 | 281.63 ± 1.16 ms | 6.31 ± 0.09 ms | 22.4 | 123.3 |
- | [YOLO11l-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-seg.pt) | 640 | 53.4 | 42.9 | 344.16 ± 3.17 ms | 7.78 ± 0.16 ms | 27.6 | 142.2 |
- | [YOLO11x-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-seg.pt) | 640 | 54.7 | 43.8 | 664.50 ± 3.24 ms | 15.75 ± 0.67 ms | 62.1 | 319.0 |
+ | [YOLO11n-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n-seg.pt) | 640 | 38.9 | 32.0 | 65.9 ± 1.1 | 1.8 ± 0.0 | 2.9 | 10.4 |
+ | [YOLO11s-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s-seg.pt) | 640 | 46.6 | 37.8 | 117.6 ± 4.9 | 2.9 ± 0.0 | 10.1 | 35.5 |
+ | [YOLO11m-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m-seg.pt) | 640 | 51.5 | 41.5 | 281.6 ± 1.2 | 6.3 ± 0.1 | 22.4 | 123.3 |
+ | [YOLO11l-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-seg.pt) | 640 | 53.4 | 42.9 | 344.2 ± 3.2 | 7.8 ± 0.2 | 27.6 | 142.2 |
+ | [YOLO11x-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-seg.pt) | 640 | 54.7 | 43.8 | 664.5 ± 3.2 | 15.8 ± 0.7 | 62.1 | 319.0 |

  - **mAP<sup>val</sup>** values are for single-model single-scale on [COCO val2017](https://cocodataset.org/) dataset. <br>Reproduce by `yolo val segment data=coco-seg.yaml device=0`
  - **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val segment data=coco-seg.yaml batch=1 device=0|cpu`

  </details>

+ <details><summary>Classification (ImageNet)</summary>
+
+ See [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usage examples with these models trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), which include 1000 pretrained classes.
+
+ | Model | size<br><sup>(pixels) | acc<br><sup>top1 | acc<br><sup>top5 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) at 640 |
+ | -------------------------------------------------------------------------------------------- | --------------------- | ---------------- | ---------------- | ------------------------------ | ----------------------------------- | ------------------ | ------------------------ |
+ | [YOLO11n-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n-cls.pt) | 224 | 70.0 | 89.4 | 5.0 ± 0.3 | 1.1 ± 0.0 | 1.6 | 3.3 |
+ | [YOLO11s-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s-cls.pt) | 224 | 75.4 | 92.7 | 7.9 ± 0.2 | 1.3 ± 0.0 | 5.5 | 12.1 |
+ | [YOLO11m-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m-cls.pt) | 224 | 77.3 | 93.9 | 17.2 ± 0.4 | 2.0 ± 0.0 | 10.4 | 39.3 |
+ | [YOLO11l-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-cls.pt) | 224 | 78.3 | 94.3 | 23.2 ± 0.3 | 2.8 ± 0.0 | 12.9 | 49.4 |
+ | [YOLO11x-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-cls.pt) | 224 | 79.5 | 94.9 | 41.4 ± 0.9 | 3.8 ± 0.0 | 28.4 | 110.4 |
+
+ - **acc** values are model accuracies on the [ImageNet](https://www.image-net.org/) dataset validation set. <br>Reproduce by `yolo val classify data=path/to/ImageNet device=0`
+ - **Speed** averaged over ImageNet val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu`
+
+ </details>
+
  <details><summary>Pose (COCO)</summary>

  See [Pose Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples with these models trained on [COCO-Pose](https://docs.ultralytics.com/datasets/pose/coco/), which include 1 pre-trained class, person.

  | Model | size<br><sup>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
  | ---------------------------------------------------------------------------------------------- | --------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
- | [YOLO11n-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n-pose.pt) | 640 | 50.0 | 81.0 | 52.40 ± 0.51 ms | 1.72 ± 0.01 ms | 2.9 | 7.6 |
- | [YOLO11s-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s-pose.pt) | 640 | 58.9 | 86.3 | 90.54 ± 0.59 ms | 2.57 ± 0.00 ms | 9.9 | 23.2 |
- | [YOLO11m-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m-pose.pt) | 640 | 64.9 | 89.4 | 187.28 ± 0.77 ms | 4.94 ± 0.05 ms | 20.9 | 71.7 |
- | [YOLO11l-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-pose.pt) | 640 | 66.1 | 89.9 | 247.69 ± 1.10 ms | 6.42 ± 0.13 ms | 26.2 | 90.7 |
- | [YOLO11x-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-pose.pt) | 640 | 69.5 | 91.1 | 487.97 ± 13.91 ms | 12.06 ± 0.20 ms | 58.8 | 203.3 |
+ | [YOLO11n-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n-pose.pt) | 640 | 50.0 | 81.0 | 52.4 ± 0.5 | 1.7 ± 0.0 | 2.9 | 7.6 |
+ | [YOLO11s-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s-pose.pt) | 640 | 58.9 | 86.3 | 90.5 ± 0.6 | 2.6 ± 0.0 | 9.9 | 23.2 |
+ | [YOLO11m-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m-pose.pt) | 640 | 64.9 | 89.4 | 187.3 ± 0.8 | 4.9 ± 0.1 | 20.9 | 71.7 |
+ | [YOLO11l-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-pose.pt) | 640 | 66.1 | 89.9 | 247.7 ± 1.1 | 6.4 ± 0.1 | 26.2 | 90.7 |
+ | [YOLO11x-pose](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-pose.pt) | 640 | 69.5 | 91.1 | 488.0 ± 13.9 | 12.1 ± 0.2 | 58.8 | 203.3 |

  - **mAP<sup>val</sup>** values are for single-model single-scale on [COCO Keypoints val2017](https://cocodataset.org/) dataset. <br>Reproduce by `yolo val pose data=coco-pose.yaml device=0`
  - **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu`
@@ -265,34 +282,17 @@ See [OBB Docs](https://docs.ultralytics.com/tasks/obb/) for usage examples with

  | Model | size<br><sup>(pixels) | mAP<sup>test<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
  | -------------------------------------------------------------------------------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
- | [YOLO11n-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n-obb.pt) | 1024 | 78.4 | 117.56 ± 0.80 ms | 4.43 ± 0.01 ms | 2.7 | 17.2 |
- | [YOLO11s-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s-obb.pt) | 1024 | 79.5 | 219.41 ± 4.00 ms | 5.13 ± 0.02 ms | 9.7 | 57.5 |
- | [YOLO11m-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m-obb.pt) | 1024 | 80.9 | 562.81 ± 2.87 ms | 10.07 ± 0.38 ms | 20.9 | 183.5 |
- | [YOLO11l-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-obb.pt) | 1024 | 81.0 | 712.49 ± 4.98 ms | 13.46 ± 0.55 ms | 26.2 | 232.0 |
- | [YOLO11x-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-obb.pt) | 1024 | 81.3 | 1408.63 ± 7.67 ms | 28.59 ± 0.96 ms | 58.8 | 520.2 |
+ | [YOLO11n-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n-obb.pt) | 1024 | 78.4 | 117.6 ± 0.8 | 4.4 ± 0.0 | 2.7 | 17.2 |
+ | [YOLO11s-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s-obb.pt) | 1024 | 79.5 | 219.4 ± 4.0 | 5.1 ± 0.0 | 9.7 | 57.5 |
+ | [YOLO11m-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m-obb.pt) | 1024 | 80.9 | 562.8 ± 2.9 | 10.1 ± 0.4 | 20.9 | 183.5 |
+ | [YOLO11l-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-obb.pt) | 1024 | 81.0 | 712.5 ± 5.0 | 13.5 ± 0.6 | 26.2 | 232.0 |
+ | [YOLO11x-obb](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-obb.pt) | 1024 | 81.3 | 1408.6 ± 7.7 | 28.6 ± 1.0 | 58.8 | 520.2 |

  - **mAP<sup>test</sup>** values are for single-model multiscale on [DOTAv1](https://captain-whu.github.io/DOTA/index.html) dataset. <br>Reproduce by `yolo val obb data=DOTAv1.yaml device=0 split=test` and submit merged results to [DOTA evaluation](https://captain-whu.github.io/DOTA/evaluation.html).
  - **Speed** averaged over DOTAv1 val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val obb data=DOTAv1.yaml batch=1 device=0|cpu`

  </details>

- <details><summary>Classification (ImageNet)</summary>
-
- See [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usage examples with these models trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), which include 1000 pretrained classes.
-
- | Model | size<br><sup>(pixels) | acc<br><sup>top1 | acc<br><sup>top5 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) at 640 |
- | -------------------------------------------------------------------------------------------- | --------------------- | ---------------- | ---------------- | ------------------------------ | ----------------------------------- | ------------------ | ------------------------ |
- | [YOLO11n-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n-cls.pt) | 224 | 70.0 | 89.4 | 5.03 ± 0.32 ms | 1.10 ± 0.01 ms | 1.6 | 3.3 |
- | [YOLO11s-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s-cls.pt) | 224 | 75.4 | 92.7 | 7.89 ± 0.18 ms | 1.34 ± 0.01 ms | 5.5 | 12.1 |
- | [YOLO11m-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11m-cls.pt) | 224 | 77.3 | 93.9 | 17.17 ± 0.40 ms | 1.95 ± 0.00 ms | 10.4 | 39.3 |
- | [YOLO11l-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-cls.pt) | 224 | 78.3 | 94.3 | 23.17 ± 0.29 ms | 2.76 ± 0.00 ms | 12.9 | 49.4 |
- | [YOLO11x-cls](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-cls.pt) | 224 | 79.5 | 94.9 | 41.41 ± 0.94 ms | 3.82 ± 0.00 ms | 28.4 | 110.4 |
-
- - **acc** values are model accuracies on the [ImageNet](https://www.image-net.org/) dataset validation set. <br>Reproduce by `yolo val classify data=path/to/ImageNet device=0`
- - **Speed** averaged over ImageNet val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu`
-
- </details>
-
  ## <div align="center">Integrations</div>

  Our key integrations with leading AI platforms extend the functionality of Ultralytics' offerings, enhancing tasks like dataset labeling, training, visualization, and model management. Discover how Ultralytics, in collaboration with [Roboflow](https://roboflow.com/?ref=ultralytics), ClearML, [Comet](https://bit.ly/yolov8-readme-comet), Neural Magic and [OpenVINO](https://docs.ultralytics.com/integrations/openvino/), can optimize your AI workflow.
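The benchmark tables above cite `yolo val ...` CLI commands for reproducing the accuracy figures; a hedged Python equivalent using the documented Ultralytics API (model weights and dataset are downloaded on first use):

from ultralytics import YOLO

model = YOLO("yolo11n.pt")                                    # pretrained detection checkpoint
metrics = model.val(data="coco.yaml", batch=1, device="cpu")  # single-image batches, CPU timing
print(metrics.box.map)                                        # mAP50-95, the value tabulated above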
ultralytics-8.3.3.dist-info/RECORD CHANGED
@@ -8,7 +8,7 @@ tests/test_exports.py,sha256=fpTKEVBUGLF3WiZPNKRs-IEcIY4cfxgvgKjUNfodjww,8042
  tests/test_integrations.py,sha256=f5-QCUk1SU_-qn4mBCZwS3GN3tXEBIIXo4z2EhExbHw,6126
  tests/test_python.py,sha256=I1RRdCwLdrc3jX06huVxct8HX8ccQOmQgVpuEflRl0U,23560
  tests/test_solutions.py,sha256=eAaLf1wM7IJ6DjT7NEw6sRaeDuTX0ZgsTjrI33XFCXE,3300
- ultralytics/__init__.py,sha256=tjagVFz_UrcZy9VrQs1MFlCri2UHRqEpXVaExqZQfKY,693
+ ultralytics/__init__.py,sha256=EBK5aoP9DP2M_QXggxoUlGqceIsrS3Pv0LXlAQforQU,693
  ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
  ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
  ultralytics/cfg/__init__.py,sha256=62PSSAa0W4-gAEcRNKoKbcxUWBeFNs0ss2O4XJQhOPY,33145
@@ -114,7 +114,7 @@ ultralytics/engine/validator.py,sha256=483Ad87Irk7IBlJNLu2SQAJsb7YriALTX9GIgriCm
  ultralytics/hub/__init__.py,sha256=3SKvZ5aRina3h94xMPQIB3D4maF62qFcyIqPPHRHNAc,5644
  ultralytics/hub/auth.py,sha256=kDLakGa2NbzvMAeXc2UdzZ65r0AH-XeM_JfsDY97WGk,5545
  ultralytics/hub/session.py,sha256=2KznO5kX14HFZ2-Ct9LoG312sdHuigQSLZb58MGvbJY,16411
- ultralytics/hub/utils.py,sha256=I7NATG6O_QRw7EU7EHkdTVvbCkwKCyUe54BP60To_so,9715
+ ultralytics/hub/utils.py,sha256=jBfuDJkOc8xCC-pjRFaC-x5GEfcS5Koua2bepHIU3SY,9705
  ultralytics/hub/google/__init__.py,sha256=uclNs-_5vAzQMgQKgl8eBvml1cx6IZYXRUhrF57v6_k,7504
  ultralytics/models/__init__.py,sha256=TT9iLCL_n9Y80dcUq0Fo-p-GRZCSU2vrWXM3CoMwqqE,265
  ultralytics/models/fastsam/__init__.py,sha256=W0rRSJM3vdxcsneuiN6_ajkUw86k6-opUKdLxVhKOoQ,203
@@ -192,7 +192,7 @@ ultralytics/solutions/object_counter.py,sha256=U66uvv_6QSol4-LY1E9JOZnYRYbek5Kz3
  ultralytics/solutions/parking_management.py,sha256=VgYyhoSEo7fnPegIhNUqnFL0jlMEevALx0QQbzJ3vGI,9049
  ultralytics/solutions/queue_management.py,sha256=yKPGc2-fN-lMpNddkxjN7xYGIJwMdoU-VIDRxQ1KPow,4869
  ultralytics/solutions/speed_estimation.py,sha256=c9OPGpDU9x6Dj4SobNc-sO90EZTPTGeKkW5u6C6Zj7g,4623
- ultralytics/solutions/streamlit_inference.py,sha256=MKf5P3O5oJwIKu2h_URvzaQjMWoSEMDMBwordplfRxo,5703
+ ultralytics/solutions/streamlit_inference.py,sha256=qA2EtwUC7ADOQ8P-zs3VPyrIoRArhcZz9CxkFbH63bw,5699
  ultralytics/trackers/__init__.py,sha256=j72IgH2dZHQArMPK4YwcV5ieIw94fYvlGdQjB9cOQKw,227
  ultralytics/trackers/basetrack.py,sha256=dXnXW3cxxd7lPm20JJCNO2voCIrQ4vhbNI1g4YEgn-Y,4423
  ultralytics/trackers/bot_sort.py,sha256=766grVQExvonb087Wy-SB32TSwYYsTEM22yoWeQ_EEo,10494
@@ -202,9 +202,9 @@ ultralytics/trackers/utils/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7J
  ultralytics/trackers/utils/gmc.py,sha256=VcURuY041qGCeWUGMxHZBr10T16LtcMqyv7AmTfE1MY,14557
  ultralytics/trackers/utils/kalman_filter.py,sha256=cH9zD3fwkuezP97H9mw8cSBN7a8hHKx_Sx1j7t3oYGs,21349
  ultralytics/trackers/utils/matching.py,sha256=3Ie1WNNRZ4_q3365F03XD7Nr9juZB_08mw4yUKC3w74,7162
- ultralytics/utils/__init__.py,sha256=q8YdOKbfIccEs26krg97fmOKbOUC__O3mdzzFQzgUqE,48340
+ ultralytics/utils/__init__.py,sha256=R2VpuwIfwpTSTX2T_MFdW1tNdX27FZW5XAH984tjR1Q,48834
  ultralytics/utils/autobatch.py,sha256=AXboYfNSnTGsYj5FmgGYPQd0crfkeleyms6QXQfZGQ4,4194
- ultralytics/utils/benchmarks.py,sha256=IN6ZqU-1DVHnwRsdgS_vcBhng8DUMRIEjEEgdrl1mdY,25101
+ ultralytics/utils/benchmarks.py,sha256=8FYp5WPzcxcDaeg8ol2sgzRBHVGYatEO7f3MrmPF6nI,25097
  ultralytics/utils/checks.py,sha256=tiwVY1SCf7AlDOUQDh6fJlmhQ3CxQEqLUrXRvwRBoKs,28998
  ultralytics/utils/dist.py,sha256=NDFga-uKxkBX2zLxFHSene_cCiGQJoyOeCXcN9JIOIk,2358
  ultralytics/utils/downloads.py,sha256=97JitihZqvIMS6_TX5rJAG7BI8eYHlu5g8YXlI0RkR4,21998
@@ -215,7 +215,7 @@ ultralytics/utils/loss.py,sha256=SW3FVFFp8Ki_LCT8wIdFbm6KmyPcQn3RmKNcvVAhMQI,341
  ultralytics/utils/metrics.py,sha256=UgLGudWp57uXDMlMUJy4gsz6cfVjcq7tYmHeto3TqvM,53927
  ultralytics/utils/ops.py,sha256=dsXNdyrYx_p6io6zezig9p84dxS7U-10vceHNVu2IL0,32888
  ultralytics/utils/patches.py,sha256=J-iOwIRbfUs-inBZerhnXby5tUKjYcOIyvhLTS352JE,3270
- ultralytics/utils/plotting.py,sha256=4ow_6Pn8REgQA_qXThvwLxOKg3OrewcuCAPq9DylocY,62094
+ ultralytics/utils/plotting.py,sha256=Sqs9Q7mhenCsFed_oyw_64wgvd0TTae9L3Lc4g2_lSI,62296
  ultralytics/utils/tal.py,sha256=ECsu95xEqOItmxMDN4YTD3FsUiIsQNWy0pZC3TfvFfk,16877
  ultralytics/utils/torch_utils.py,sha256=tqOyNnUZbLBOIueSWwljZua65cz6_RvClxYv8gNHIw0,29673
  ultralytics/utils/triton.py,sha256=gg1finxno_tY2Ge9PMhmu7PI9wvoFZoiicdT4Bhqv3w,3936
@@ -231,9 +231,9 @@ ultralytics/utils/callbacks/neptune.py,sha256=5Z3ua5YBTUS56FH8VQKQG1aaIo9fH8GEyz
  ultralytics/utils/callbacks/raytune.py,sha256=ODVYzy-CoM4Uge0zjkh3Hnh9nF2M0vhDrSenXnvcizw,705
  ultralytics/utils/callbacks/tensorboard.py,sha256=0kn4IR10no99UCIheojWRujgybmUHSx5fPI6Vsq6l_g,4135
  ultralytics/utils/callbacks/wb.py,sha256=9-fjQIdLjr3b73DTE3rHO171KvbH1VweJ-bmbv-rqTw,6747
- ultralytics-8.3.2.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
- ultralytics-8.3.2.dist-info/METADATA,sha256=cjJvRz_nxBld56qrZEJDXuQn-C3lbUVzMIHtzs0n89U,34574
- ultralytics-8.3.2.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
- ultralytics-8.3.2.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
- ultralytics-8.3.2.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
- ultralytics-8.3.2.dist-info/RECORD,,
+ ultralytics-8.3.3.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ ultralytics-8.3.3.dist-info/METADATA,sha256=K4q0V89-JTwWjGWue29_CsVQH9AhLpZVmt3b61x-iMc,34574
+ ultralytics-8.3.3.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+ ultralytics-8.3.3.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ ultralytics-8.3.3.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ ultralytics-8.3.3.dist-info/RECORD,,