ultralytics-opencv-headless 8.4.1__py3-none-any.whl → 8.4.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. tests/test_exports.py +0 -2
  2. ultralytics/__init__.py +1 -1
  3. ultralytics/cfg/__init__.py +19 -21
  4. ultralytics/data/annotator.py +2 -2
  5. ultralytics/data/converter.py +57 -38
  6. ultralytics/engine/exporter.py +22 -22
  7. ultralytics/engine/model.py +33 -33
  8. ultralytics/engine/predictor.py +17 -17
  9. ultralytics/engine/results.py +14 -12
  10. ultralytics/engine/trainer.py +27 -22
  11. ultralytics/engine/tuner.py +4 -4
  12. ultralytics/engine/validator.py +16 -16
  13. ultralytics/models/yolo/classify/predict.py +1 -1
  14. ultralytics/models/yolo/classify/train.py +1 -1
  15. ultralytics/models/yolo/classify/val.py +1 -1
  16. ultralytics/models/yolo/detect/predict.py +2 -2
  17. ultralytics/models/yolo/detect/train.py +1 -1
  18. ultralytics/models/yolo/detect/val.py +1 -1
  19. ultralytics/models/yolo/model.py +7 -7
  20. ultralytics/models/yolo/obb/predict.py +1 -1
  21. ultralytics/models/yolo/obb/train.py +2 -2
  22. ultralytics/models/yolo/obb/val.py +1 -1
  23. ultralytics/models/yolo/pose/predict.py +1 -1
  24. ultralytics/models/yolo/pose/train.py +4 -2
  25. ultralytics/models/yolo/pose/val.py +1 -1
  26. ultralytics/models/yolo/segment/predict.py +2 -2
  27. ultralytics/models/yolo/segment/train.py +3 -3
  28. ultralytics/models/yolo/segment/val.py +1 -1
  29. ultralytics/nn/autobackend.py +2 -2
  30. ultralytics/nn/modules/head.py +1 -1
  31. ultralytics/nn/tasks.py +12 -12
  32. ultralytics/solutions/ai_gym.py +3 -3
  33. ultralytics/solutions/config.py +1 -1
  34. ultralytics/solutions/heatmap.py +1 -1
  35. ultralytics/solutions/instance_segmentation.py +2 -2
  36. ultralytics/solutions/parking_management.py +1 -1
  37. ultralytics/solutions/solutions.py +2 -2
  38. ultralytics/trackers/track.py +1 -1
  39. ultralytics/utils/__init__.py +8 -8
  40. ultralytics/utils/benchmarks.py +23 -23
  41. ultralytics/utils/callbacks/platform.py +11 -9
  42. ultralytics/utils/checks.py +6 -6
  43. ultralytics/utils/downloads.py +2 -2
  44. ultralytics/utils/export/imx.py +3 -8
  45. ultralytics/utils/files.py +2 -2
  46. ultralytics/utils/loss.py +3 -3
  47. ultralytics/utils/tuner.py +2 -2
  48. {ultralytics_opencv_headless-8.4.1.dist-info → ultralytics_opencv_headless-8.4.3.dist-info}/METADATA +36 -36
  49. {ultralytics_opencv_headless-8.4.1.dist-info → ultralytics_opencv_headless-8.4.3.dist-info}/RECORD +53 -53
  50. {ultralytics_opencv_headless-8.4.1.dist-info → ultralytics_opencv_headless-8.4.3.dist-info}/WHEEL +0 -0
  51. {ultralytics_opencv_headless-8.4.1.dist-info → ultralytics_opencv_headless-8.4.3.dist-info}/entry_points.txt +0 -0
  52. {ultralytics_opencv_headless-8.4.1.dist-info → ultralytics_opencv_headless-8.4.3.dist-info}/licenses/LICENSE +0 -0
  53. {ultralytics_opencv_headless-8.4.1.dist-info → ultralytics_opencv_headless-8.4.3.dist-info}/top_level.txt +0 -0
ultralytics/utils/export/imx.py CHANGED
@@ -104,15 +104,10 @@ class FXModel(torch.nn.Module):
         return x


-    def _inference(self, x: list[torch.Tensor] | dict[str, torch.Tensor]) -> tuple[torch.Tensor]:
+    def _inference(self, x: dict[str, torch.Tensor]) -> tuple[torch.Tensor]:
         """Decode boxes and cls scores for imx object detection."""
-        if isinstance(x, dict):
-            box, cls = x["boxes"], x["scores"]
-        else:
-            x_cat = torch.cat([xi.view(x[0].shape[0], self.no, -1) for xi in x], 2)
-            box, cls = x_cat.split((self.reg_max * 4, self.nc), 1)
-        dbox = self.decode_bboxes(self.dfl(box), self.anchors.unsqueeze(0)) * self.strides
-        return dbox.transpose(1, 2), cls.sigmoid().permute(0, 2, 1)
+        dbox = self.decode_bboxes(self.dfl(x["boxes"]), self.anchors.unsqueeze(0)) * self.strides
+        return dbox.transpose(1, 2), x["scores"].sigmoid().permute(0, 2, 1)


     def pose_forward(self, x: list[torch.Tensor]) -> tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
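The simplified `_inference` above assumes the IMX export path always hands over a dict with pre-split "boxes" and "scores" tensors instead of the raw per-level feature list. A minimal sketch of the output reshaping this relies on, with tensor sizes assumed for illustration only:

import torch

# Assumed shapes: 1 image, 16 DFL bins, 80 classes, 8400 anchors (not taken from the package).
batch, reg_max, nc, num_anchors = 1, 16, 80, 8400
x = {
    "boxes": torch.randn(batch, 4 * reg_max, num_anchors),  # raw box distributions
    "scores": torch.randn(batch, nc, num_anchors),           # raw class logits
}

# The new code path decodes boxes straight from x["boxes"] and only reshapes the scores:
scores = x["scores"].sigmoid().permute(0, 2, 1)  # -> (batch, num_anchors, nc)
print(scores.shape)  # torch.Size([1, 8400, 80])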
ultralytics/utils/files.py CHANGED
@@ -180,7 +180,7 @@ def get_latest_run(search_dir: str = ".") -> str:
     return max(last_list, key=os.path.getctime) if last_list else ""


-def update_models(model_names: tuple = ("yolo11n.pt",), source_dir: Path = Path("."), update_names: bool = False):
+def update_models(model_names: tuple = ("yolo26n.pt",), source_dir: Path = Path("."), update_names: bool = False):
     """Update and re-save specified YOLO models in an 'updated_models' subdirectory.

     Args:
@@ -191,7 +191,7 @@ def update_models(model_names: tuple = ("yolo11n.pt",), source_dir: Path = Path(
     Examples:
         Update specified YOLO models and save them in 'updated_models' subdirectory:
         >>> from ultralytics.utils.files import update_models
-        >>> model_names = ("yolo11n.pt", "yolov8s.pt")
+        >>> model_names = ("yolo26n.pt", "yolo11s.pt")
         >>> update_models(model_names, source_dir=Path("/models"), update_names=True)
     """
     from ultralytics import YOLO
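With the default weight name bumped to `yolo26n.pt`, a bare call now refreshes that checkpoint; a minimal usage sketch, assuming yolo26n.pt is present in the current directory:

from pathlib import Path

from ultralytics.utils.files import update_models

# Re-save the default yolo26n.pt checkpoint into an 'updated_models' subdirectory.
update_models(source_dir=Path("."))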
ultralytics/utils/loss.py CHANGED
@@ -834,7 +834,7 @@ class PoseLoss26(v8PoseLoss):
         if self.rle_loss is not None:
             loss[5] *= self.hyp.rle  # rle gain

-        return loss * batch_size, loss.detach()  # loss(box, cls, dfl)
+        return loss * batch_size, loss.detach()  # loss(box, cls, dfl, kpt_location, kpt_visibility)

     @staticmethod
     def kpts_decode(anchor_points: torch.Tensor, pred_kpts: torch.Tensor) -> torch.Tensor:
@@ -982,7 +982,7 @@ class v8OBBLoss(v8DetectionLoss):

     def loss(self, preds: dict[str, torch.Tensor], batch: dict[str, torch.Tensor]) -> tuple[torch.Tensor, torch.Tensor]:
         """Calculate and return the loss for oriented bounding box detection."""
-        loss = torch.zeros(4, device=self.device)  # box, cls, dfl
+        loss = torch.zeros(4, device=self.device)  # box, cls, dfl, angle
         pred_distri, pred_scores, pred_angle = (
             preds["boxes"].permute(0, 2, 1).contiguous(),
             preds["scores"].permute(0, 2, 1).contiguous(),
@@ -1007,7 +1007,7 @@ class v8OBBLoss(v8DetectionLoss):
             raise TypeError(
                 "ERROR ❌ OBB dataset incorrectly formatted or not a OBB dataset.\n"
                 "This error can occur when incorrectly training a 'OBB' model on a 'detect' dataset, "
-                "i.e. 'yolo train model=yolo11n-obb.pt data=dota8.yaml'.\nVerify your dataset is a "
+                "i.e. 'yolo train model=yolo26n-obb.pt data=dota8.yaml'.\nVerify your dataset is a "
                 "correctly formatted 'OBB' dataset using 'data=dota8.yaml' "
                 "as an example.\nSee https://docs.ultralytics.com/datasets/obb/ for help."
             ) from e
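The corrected comments above only rename what each slot of the loss tensor holds; a toy sketch of that layout for the OBB case, with placeholder values rather than real losses:

import torch

# Four loss components for oriented boxes, matching the corrected comment: box, cls, dfl, angle.
names = ("box", "cls", "dfl", "angle")
loss = torch.zeros(4)
loss[0], loss[1], loss[2], loss[3] = 0.45, 0.30, 0.15, 0.10  # placeholder values

print({n: round(v, 2) for n, v in zip(names, loss.tolist())})
# {'box': 0.45, 'cls': 0.3, 'dfl': 0.15, 'angle': 0.1}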
ultralytics/utils/tuner.py CHANGED
@@ -29,9 +29,9 @@ def run_ray_tune(

     Examples:
         >>> from ultralytics import YOLO
-        >>> model = YOLO("yolo11n.pt")  # Load a YOLO11n model
+        >>> model = YOLO("yolo26n.pt")  # Load a YOLO26n model

-        Start tuning hyperparameters for YOLO11n training on the COCO8 dataset
+        Start tuning hyperparameters for YOLO26n training on the COCO8 dataset
         >>> result_grid = model.tune(data="coco8.yaml", use_ray=True)
     """
     LOGGER.info("💡 Learn about RayTune at https://docs.ultralytics.com/integrations/ray-tune")
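A hedged usage sketch of the documented Ray Tune entry point, assuming `ray[tune]` is installed and the yolo26n.pt checkpoint is available:

from ultralytics import YOLO

# Load the YOLO26n checkpoint referenced by the updated docstring.
model = YOLO("yolo26n.pt")

# Start hyperparameter tuning on COCO8 via Ray Tune (requires `pip install "ray[tune]"`).
result_grid = model.tune(data="coco8.yaml", use_ray=True)

# result_grid is a ray.tune.ResultGrid; each entry exposes the trial's reported metrics.
for result in result_grid:
    print(result.metrics)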
{ultralytics_opencv_headless-8.4.1.dist-info → ultralytics_opencv_headless-8.4.3.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ultralytics-opencv-headless
-Version: 8.4.1
+Version: 8.4.3
 Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -218,13 +218,13 @@ Ultralytics supports a wide range of YOLO models, from early versions like [YOLO
 
 Explore the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples. These models are trained on the [COCO dataset](https://cocodataset.org/), featuring 80 object classes.
 
-| Model | size<br><sup>(pixels) | mAP<sup>val<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
-| ------------------------------------------------------------------------------------ | --------------------- | -------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [YOLO26n](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n.pt) | 640 | 40.9 | 38.9 ± 0.7 | 1.7 ± 0.0 | 2.4 | 5.4 |
-| [YOLO26s](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s.pt) | 640 | 48.6 | 87.2 ± 0.9 | 2.5 ± 0.0 | 9.5 | 20.7 |
-| [YOLO26m](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m.pt) | 640 | 53.1 | 220.0 ± 1.4 | 4.7 ± 0.1 | 20.4 | 68.2 |
-| [YOLO26l](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l.pt) | 640 | 55.0 | 286.2 ± 2.0 | 6.2 ± 0.2 | 24.8 | 86.4 |
-| [YOLO26x](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x.pt) | 640 | 57.5 | 525.8 ± 4.0 | 11.8 ± 0.2 | 55.7 | 193.9 |
+| Model | size<br><sup>(pixels)</sup> | mAP<sup>val<br>50-95</sup> | mAP<sup>val<br>50-95(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+| ------------------------------------------------------------------------------------ | --------------------------- | -------------------------- | ------------------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ----------------------- |
+| [YOLO26n](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n.pt) | 640 | 40.9 | 40.1 | 38.9 ± 0.7 | 1.7 ± 0.0 | 2.4 | 5.4 |
+| [YOLO26s](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s.pt) | 640 | 48.6 | 47.8 | 87.2 ± 0.9 | 2.5 ± 0.0 | 9.5 | 20.7 |
+| [YOLO26m](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m.pt) | 640 | 53.1 | 52.5 | 220.0 ± 1.4 | 4.7 ± 0.1 | 20.4 | 68.2 |
+| [YOLO26l](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l.pt) | 640 | 55.0 | 54.4 | 286.2 ± 2.0 | 6.2 ± 0.2 | 24.8 | 86.4 |
+| [YOLO26x](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x.pt) | 640 | 57.5 | 56.9 | 525.8 ± 4.0 | 11.8 ± 0.2 | 55.7 | 193.9 |
 
 - **mAP<sup>val</sup>** values refer to single-model single-scale performance on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val detect data=coco.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val detect data=coco.yaml batch=1 device=0|cpu`
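The CLI reproduce commands in the notes above have a Python equivalent through the `val` mode; a minimal sketch, assuming the COCO dataset is configured locally and a yolo26n.pt checkpoint is available:

from ultralytics import YOLO

# Validate a detection checkpoint on COCO, mirroring `yolo val detect data=coco.yaml device=0`.
model = YOLO("yolo26n.pt")
metrics = model.val(data="coco.yaml", device=0)
print(metrics.box.map)  # mAP50-95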
@@ -235,13 +235,13 @@ Explore the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usa
 
 Refer to the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage examples. These models are trained on [COCO-Seg](https://docs.ultralytics.com/datasets/segment/coco/), including 80 classes.
 
-| Model | size<br><sup>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mask<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
-| -------------------------------------------------------------------------------------------- | --------------------- | -------------------- | --------------------- | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640 | 39.6 | 33.9 | 53.3 ± 0.5 | 2.1 ± 0.0 | 2.7 | 9.1 |
-| [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640 | 47.3 | 40.0 | 118.4 ± 0.9 | 3.3 ± 0.0 | 10.4 | 34.2 |
-| [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640 | 52.5 | 44.1 | 328.2 ± 2.4 | 6.7 ± 0.1 | 23.6 | 121.5 |
-| [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640 | 54.4 | 45.5 | 387.0 ± 3.7 | 8.0 ± 0.1 | 28.0 | 139.8 |
-| [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640 | 56.5 | 47.0 | 787.0 ± 6.8 | 16.4 ± 0.1 | 62.8 | 313.5 |
+| Model | size<br><sup>(pixels)</sup> | mAP<sup>box<br>50-95(e2e)</sup> | mAP<sup>mask<br>50-95(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+| -------------------------------------------------------------------------------------------- | --------------------------- | ------------------------------- | -------------------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ----------------------- |
+| [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640 | 39.6 | 33.9 | 53.3 ± 0.5 | 2.1 ± 0.0 | 2.7 | 9.1 |
+| [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640 | 47.3 | 40.0 | 118.4 ± 0.9 | 3.3 ± 0.0 | 10.4 | 34.2 |
+| [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640 | 52.5 | 44.1 | 328.2 ± 2.4 | 6.7 ± 0.1 | 23.6 | 121.5 |
+| [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640 | 54.4 | 45.5 | 387.0 ± 3.7 | 8.0 ± 0.1 | 28.0 | 139.8 |
+| [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640 | 56.5 | 47.0 | 787.0 ± 6.8 | 16.4 ± 0.1 | 62.8 | 313.5 |
 
 - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val segment data=coco.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val segment data=coco.yaml batch=1 device=0|cpu`
@@ -252,13 +252,13 @@ Refer to the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) fo
 
 Consult the [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usage examples. These models are trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), covering 1000 classes.
 
-| Model | size<br><sup>(pixels) | acc<br><sup>top1 | acc<br><sup>top5 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) at 224 |
-| -------------------------------------------------------------------------------------------- | --------------------- | ---------------- | ---------------- | ------------------------------ | ----------------------------------- | ------------------ | ------------------------ |
-| [YOLO26n-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-cls.pt) | 224 | 71.4 | 90.1 | 5.0 ± 0.3 | 1.1 ± 0.0 | 2.8 | 0.5 |
-| [YOLO26s-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-cls.pt) | 224 | 76.0 | 92.9 | 7.9 ± 0.2 | 1.3 ± 0.0 | 6.7 | 1.6 |
-| [YOLO26m-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-cls.pt) | 224 | 78.1 | 94.2 | 17.2 ± 0.4 | 2.0 ± 0.0 | 11.6 | 4.9 |
-| [YOLO26l-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-cls.pt) | 224 | 79.0 | 94.6 | 23.2 ± 0.3 | 2.8 ± 0.0 | 14.1 | 6.2 |
-| [YOLO26x-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-cls.pt) | 224 | 79.9 | 95.0 | 41.4 ± 0.9 | 3.8 ± 0.0 | 29.6 | 13.6 |
+| Model | size<br><sup>(pixels)</sup> | acc<br><sup>top1</sup> | acc<br><sup>top5</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B) at 224</sup> |
+| -------------------------------------------------------------------------------------------- | --------------------------- | ---------------------- | ---------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ------------------------------ |
+| [YOLO26n-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-cls.pt) | 224 | 71.4 | 90.1 | 5.0 ± 0.3 | 1.1 ± 0.0 | 2.8 | 0.5 |
+| [YOLO26s-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-cls.pt) | 224 | 76.0 | 92.9 | 7.9 ± 0.2 | 1.3 ± 0.0 | 6.7 | 1.6 |
+| [YOLO26m-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-cls.pt) | 224 | 78.1 | 94.2 | 17.2 ± 0.4 | 2.0 ± 0.0 | 11.6 | 4.9 |
+| [YOLO26l-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-cls.pt) | 224 | 79.0 | 94.6 | 23.2 ± 0.3 | 2.8 ± 0.0 | 14.1 | 6.2 |
+| [YOLO26x-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-cls.pt) | 224 | 79.9 | 95.0 | 41.4 ± 0.9 | 3.8 ± 0.0 | 29.6 | 13.6 |
 
 - **acc** values represent model accuracy on the [ImageNet](https://www.image-net.org/) dataset validation set. <br>Reproduce with `yolo val classify data=path/to/ImageNet device=0`
 - **Speed** metrics are averaged over ImageNet val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu`
@@ -269,13 +269,13 @@ Consult the [Classification Docs](https://docs.ultralytics.com/tasks/classify/)
 
 See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples. These models are trained on [COCO-Pose](https://docs.ultralytics.com/datasets/pose/coco/), focusing on the 'person' class.
 
-| Model | size<br><sup>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
-| ---------------------------------------------------------------------------------------------- | --------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640 | 57.2 | 83.3 | 40.3 ± 0.5 | 1.8 ± 0.0 | 2.9 | 7.5 |
-| [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640 | 63.0 | 86.6 | 85.3 ± 0.9 | 2.7 ± 0.0 | 10.4 | 23.9 |
-| [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640 | 68.8 | 89.6 | 218.0 ± 1.5 | 5.0 ± 0.1 | 21.5 | 73.1 |
-| [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640 | 70.4 | 90.5 | 275.4 ± 2.4 | 6.5 ± 0.1 | 25.9 | 91.3 |
-| [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640 | 71.6 | 91.6 | 565.4 ± 3.0 | 12.2 ± 0.2 | 57.6 | 201.7 |
+| Model | size<br><sup>(pixels)</sup> | mAP<sup>pose<br>50-95(e2e)</sup> | mAP<sup>pose<br>50(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+| ---------------------------------------------------------------------------------------------- | --------------------------- | -------------------------------- | ----------------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ----------------------- |
+| [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640 | 57.2 | 83.3 | 40.3 ± 0.5 | 1.8 ± 0.0 | 2.9 | 7.5 |
+| [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640 | 63.0 | 86.6 | 85.3 ± 0.9 | 2.7 ± 0.0 | 10.4 | 23.9 |
+| [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640 | 68.8 | 89.6 | 218.0 ± 1.5 | 5.0 ± 0.1 | 21.5 | 73.1 |
+| [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640 | 70.4 | 90.5 | 275.4 ± 2.4 | 6.5 ± 0.1 | 25.9 | 91.3 |
+| [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640 | 71.6 | 91.6 | 565.4 ± 3.0 | 12.2 ± 0.2 | 57.6 | 201.7 |
 
 - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO Keypoints val2017](https://docs.ultralytics.com/datasets/pose/coco/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val pose data=coco-pose.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu`
@@ -286,13 +286,13 @@ See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usa
 
 Check the [OBB Docs](https://docs.ultralytics.com/tasks/obb/) for usage examples. These models are trained on [DOTAv1](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10/), including 15 classes.
 
-| Model | size<br><sup>(pixels) | mAP<sup>test<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
-| -------------------------------------------------------------------------------------------- | --------------------- | ------------------ | ------------------------------ | ----------------------------------- | ------------------ | ----------------- |
-| [YOLO26n-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-obb.pt) | 1024 | 78.9 | 97.7 ± 0.9 | 2.8 ± 0.0 | 2.5 | 14.0 |
-| [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024 | 80.9 | 218.0 ± 1.4 | 4.9 ± 0.1 | 9.8 | 55.1 |
-| [YOLO26m-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-obb.pt) | 1024 | 81.0 | 579.2 ± 3.8 | 10.2 ± 0.3 | 21.2 | 183.3 |
-| [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024 | 81.6 | 735.6 ± 3.1 | 13.0 ± 0.2 | 25.6 | 230.0 |
-| [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024 | 81.7 | 1485.7 ± 11.5 | 30.5 ± 0.9 | 57.6 | 516.5 |
+| Model | size<br><sup>(pixels)</sup> | mAP<sup>test<br>50-95(e2e)</sup> | mAP<sup>test<br>50(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+| -------------------------------------------------------------------------------------------- | --------------------------- | -------------------------------- | ----------------------------- | ------------------------------------ | ----------------------------------------- | ------------------------ | ----------------------- |
+| [YOLO26n-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-obb.pt) | 1024 | 52.4 | 78.9 | 97.7 ± 0.9 | 2.8 ± 0.0 | 2.5 | 14.0 |
+| [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024 | 54.8 | 80.9 | 218.0 ± 1.4 | 4.9 ± 0.1 | 9.8 | 55.1 |
+| [YOLO26m-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-obb.pt) | 1024 | 55.3 | 81.0 | 579.2 ± 3.8 | 10.2 ± 0.3 | 21.2 | 183.3 |
+| [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024 | 56.2 | 81.6 | 735.6 ± 3.1 | 13.0 ± 0.2 | 25.6 | 230.0 |
+| [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024 | 56.7 | 81.7 | 1485.7 ± 11.5 | 30.5 ± 0.9 | 57.6 | 516.5 |
 
 - **mAP<sup>test</sup>** values are for single-model multiscale performance on the [DOTAv1 test set](https://captain-whu.github.io/DOTA/dataset.html). <br>Reproduce by `yolo val obb data=DOTAv1.yaml device=0 split=test` and submit merged results to the [DOTA evaluation server](https://captain-whu.github.io/DOTA/evaluation.html).
 - **Speed** metrics are averaged over [DOTAv1 val images](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10) using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce by `yolo val obb data=DOTAv1.yaml batch=1 device=0|cpu`
{ultralytics_opencv_headless-8.4.1.dist-info → ultralytics_opencv_headless-8.4.3.dist-info}/RECORD CHANGED
@@ -3,15 +3,15 @@ tests/conftest.py,sha256=rlKyDuOC_3ptXrWS8Q19bNEGOupUmYXHj3nB6o1GBGY,2318
 tests/test_cli.py,sha256=GhIFHi-_WIJpDgoGNRi0DnjbfwP1wHbklBMnkCM-P_4,5464
 tests/test_cuda.py,sha256=2TBe-ZkecMOGPWLdHcbsAjH3m9c5SQJ2KeyICgS0aeo,8426
 tests/test_engine.py,sha256=ufSn3X4kL_Lpn2O25jKAfw_9QwHTMRjP9shDdpgBqnY,5740
-tests/test_exports.py,sha256=j1o0DYeHM2ulXv1UPkHFcZFWGv8ichY7KHhrb4U89QI,14894
+tests/test_exports.py,sha256=Toy4u-4bsoyAbzNhc9kbMuKqvMKywZxNj5jlFNTzFWs,14670
 tests/test_integrations.py,sha256=FjvTGjXm3bvYHK3_obgObhC5SzHCTzw4aOJV9Hh08jQ,6220
 tests/test_python.py,sha256=np6on3Sa0NNi5pquvilekjKxxedAJMpLOQEthGaIalQ,29284
 tests/test_solutions.py,sha256=1tRlM72YciE42Nk9v83gsXOD5RSx9GSWVsKGhH7-HxE,14122
-ultralytics/__init__.py,sha256=v6vc7C81LzYC2a4F7ZEElHt1Wi-uMsbkbC59Zh_dHvw,1300
+ultralytics/__init__.py,sha256=cei9ajuLEweE4RyDoGwvanjvIJe8Z347vRJatW87-JI,1300
 ultralytics/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
 ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
 ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
-ultralytics/cfg/__init__.py,sha256=ihvr4XZnnbYWFwSYtFxWdNK7fN4D1Bm19XLkD_-bCXo,40401
+ultralytics/cfg/__init__.py,sha256=w-ZCBR5lbL2ef0UhQfK7IvySIOhtPcJTJF0-3tAXObw,40300
 ultralytics/cfg/default.yaml,sha256=E__q2msvK9XCQngf0YFLpueCer_1tRcMJM0p3ahBdbA,9015
 ultralytics/cfg/datasets/Argoverse.yaml,sha256=QGpdh3Hj5dFrvbsaE_8rAVj9BO4XpKTB7uhXaTTnE-o,3364
 ultralytics/cfg/datasets/DOTAv1.5.yaml,sha256=KE7VC-ZMDSei1pLPm-pdk_ZAMRU_gLwGgtIQNbwp6dA,1212
@@ -117,11 +117,11 @@ ultralytics/cfg/models/v9/yolov9t.yaml,sha256=Q8GpSXE7fumhuJiQg4a2SkuS_UmnXqp-eo
 ultralytics/cfg/trackers/botsort.yaml,sha256=tRxC-qT4Wz0mLn5x7ZEwrqgGKrmTDVY7gMge-mhpe7U,1431
 ultralytics/cfg/trackers/bytetrack.yaml,sha256=7LS1ObP5u7BUFcmeY6L2m3bRuPUktnpJspFKd_ElVWc,908
 ultralytics/data/__init__.py,sha256=ToR8zl0JhBHy42ZvV7zIwO_F3lbi5oNlGQNPK3dlddU,644
-ultralytics/data/annotator.py,sha256=kbfSPBesKEVK6ys3dilTdMh7rCKyp0xV7tGQeEDbpWI,2985
+ultralytics/data/annotator.py,sha256=iu1En-LzlR4RyR3ocftthnAog_peQHV9ForPRo_QcX8,2985
 ultralytics/data/augment.py,sha256=4xtggkuysYcbK5pYwNuAaoCzshb5wwD9KN6_pP4uSFU,128003
 ultralytics/data/base.py,sha256=pMs8yJOmAFPXdgfLCDtUemSvkPNDzxReP-fWzkNtonc,19723
 ultralytics/data/build.py,sha256=s-tkSZPf3OfQyfXPXB9XxdW_gIcU6Xy_u21ekSgTnRo,17205
-ultralytics/data/converter.py,sha256=1m345J7YUn7gtaChO7To4BWZm72pC8D8L2O0k99q0DE,31898
+ultralytics/data/converter.py,sha256=KUFVQuesnABjm7nW90kxQ6WeYavbo7AC7ZtfuxGvPE4,33107
 ultralytics/data/dataset.py,sha256=r_BZy4FwMZ-dYkaJiz1E3jr2pI6dn7V3hZwf2RM9_RQ,36536
 ultralytics/data/loaders.py,sha256=BQbhgjiLCGcRBPkGVG9Hr1jeNfG1nuZD3jstiWb7zS8,31889
 ultralytics/data/split.py,sha256=HpR0ltf5oN1DpZstavFbBFC1YdpGPaATXxDOcAMwOqc,5101
@@ -132,13 +132,13 @@ ultralytics/data/scripts/get_coco.sh,sha256=UuJpJeo3qQpTHVINeOpmP0NYmg8PhEFE3A8J
 ultralytics/data/scripts/get_coco128.sh,sha256=qmRQl_hOKrsdHrTrnyQuFIH01oDz3lfaz138OgGfLt8,650
 ultralytics/data/scripts/get_imagenet.sh,sha256=hr42H16bM47iT27rgS7MpEo-GeOZAYUQXgr0B2cwn48,1705
 ultralytics/engine/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
-ultralytics/engine/exporter.py,sha256=SpA0Oj4w8yjYUde1okc4XfyCK376t1zZPr-bx1-p_WE,73429
-ultralytics/engine/model.py,sha256=bKoiy8ImddK-e87NmVbO5nlktqgebRM7D65epD4Cvjk,53211
-ultralytics/engine/predictor.py,sha256=neYmNDX27Vv3ggk9xqaKlH6XzB2vlFIghU5o7ZC0zFo,22838
-ultralytics/engine/results.py,sha256=DomI01voqR_i7v8LhDGb6jWCprWB4H6I436GSO2NMBY,68030
-ultralytics/engine/trainer.py,sha256=W8xFyTBZ_hFRvzMccugqLw3dBXNfRH1d8KoRO4DWFcE,46985
-ultralytics/engine/tuner.py,sha256=mD4bjddz7CE7ExKgEaIoSQw22Lg9V0NBXqR9Vey2gIs,21840
-ultralytics/engine/validator.py,sha256=2rqdVt4hB9ruMJq-L7PbaCNFwuERS7ZHdVSg91RM3wk,17761
+ultralytics/engine/exporter.py,sha256=RCViSGpUjFyUEW6GxhbKfURBW5vCOgCcXtLJKKI8ceM,73429
+ultralytics/engine/model.py,sha256=euDHUy7J5vVBvS_d-KbGZd_0BP5bF6Y3cTQ7VXtwZ4k,53210
+ultralytics/engine/predictor.py,sha256=tXrHSTHJ-rDQ3lrPW9P5_ei_ewTwbY2sji6MExybJ28,22838
+ultralytics/engine/results.py,sha256=uvD7WqaePkuYbcf-iFqh3DIy5_ZSyHeDiKIzY5VjePM,68181
+ultralytics/engine/trainer.py,sha256=8kAqel2aF2_GZ9Bi-lyf7ykWW_vkGD6dx8z5bPU6zlM,47219
+ultralytics/engine/tuner.py,sha256=F4fyQaC5_GT74TULRO0VhzTv2S_a54cZDc3FjFoqaHE,21840
+ultralytics/engine/validator.py,sha256=DiKsygbNJdRdwXoKoYOJA6bP_T7vMW3Syj_Qc_l7xTM,17761
 ultralytics/hub/__init__.py,sha256=Z0K_E00jzQh90b18q3IDChwVmTvyIYp6C00sCV-n2F8,6709
 ultralytics/hub/auth.py,sha256=ANzCeZA7lUzTWc_sFHbDuuyBh1jLl2sTpHkoUbIkFYE,6254
 ultralytics/hub/session.py,sha256=OzBXAL9R135gRDdfNYUqyiSrxOyaiMFCVYSZua99sF0,18364
@@ -189,27 +189,27 @@ ultralytics/models/utils/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXp
 ultralytics/models/utils/loss.py,sha256=9CcqRXDj5-I-7eZuenInvyoLcPf22Ynf3rUFA5V22bI,21131
 ultralytics/models/utils/ops.py,sha256=z-Ebjv_k14bWOoP6nszDzDBiy3yELcVtbj6M8PsRpvE,15207
 ultralytics/models/yolo/__init__.py,sha256=YD407NDDiyjo0x_MR6usJaTpePKPgsfBUYehlCw7lRs,307
-ultralytics/models/yolo/model.py,sha256=BbVMlUNrnjPmS9OZbETSlmUjCYxdUwYyiY3I2TtLAqw,18504
+ultralytics/models/yolo/model.py,sha256=HXkglzJQqW1x7MJaKavI5aasA-0lSH21Xcv_dac3SFU,18504
 ultralytics/models/yolo/classify/__init__.py,sha256=9--HVaNOfI1K7rn_rRqclL8FUAnpfeBrRqEQIaQw2xM,383
-ultralytics/models/yolo/classify/predict.py,sha256=wKICjwofH7-7QLJhX2vYSNJXWu2-5kWzjoXXmUPI0pU,4137
-ultralytics/models/yolo/classify/train.py,sha256=oODDfPwjgKzsbpO7NCYnOp_uwkWD7HNLhvsHxAJTA4g,8958
-ultralytics/models/yolo/classify/val.py,sha256=gtoUJN5_-56EbiYp5Ur-shfdBNMJOqToWmup_-1wW7I,10503
+ultralytics/models/yolo/classify/predict.py,sha256=HCStYkSqeg32SNTWfr4FDCkUMQ4wnKqceUK3T995us4,4137
+ultralytics/models/yolo/classify/train.py,sha256=41ZxaIJkzkRxfgq6VffFX5Xfsrm9tNv3i3bdtUPAocE,8958
+ultralytics/models/yolo/classify/val.py,sha256=akH2P3nff4oiZtV2toKB3Z9HIbsVcwsb1uvDwhamszw,10503
 ultralytics/models/yolo/detect/__init__.py,sha256=GIRsLYR-kT4JJx7lh4ZZAFGBZj0aebokuU0A7JbjDVA,257
-ultralytics/models/yolo/detect/predict.py,sha256=Sct-UwkDe54ZmVtTYl0-fKgx_0BOlPBUsr4NodFd-eU,5385
-ultralytics/models/yolo/detect/train.py,sha256=jWWzOvvcfb6s8HXMKi6l1sr1QCslN3GsqzQQ51OSpJk,10519
-ultralytics/models/yolo/detect/val.py,sha256=NXSeeXtucOMHaR64GSYjxoss5Lhqh3qtbDKJ-crf2Do,22818
+ultralytics/models/yolo/detect/predict.py,sha256=2nxlMyw_zVKq1aeJFRTgb4EGL2vOFq4pLT9tArHBfF8,5385
+ultralytics/models/yolo/detect/train.py,sha256=ffM3ULnR9Kbw_1yBq2I6BWa7V124lfQtU0_C_GHhwRI,10519
+ultralytics/models/yolo/detect/val.py,sha256=54AOR6r3istE0pILJ1v4xzPdv7UcvtTEZ6E5OGj3Jgc,22818
 ultralytics/models/yolo/obb/__init__.py,sha256=tQmpG8wVHsajWkZdmD6cjGohJ4ki64iSXQT8JY_dydo,221
-ultralytics/models/yolo/obb/predict.py,sha256=K7KtQKA-7JVLxLSOZ-X38YepZkUAOH5rUwHidm7geYU,2577
-ultralytics/models/yolo/obb/train.py,sha256=6lFDUhAYrIJeDZz7A7ZgPkoDPY4b-0Aqb8noFpCH1Ck,3452
-ultralytics/models/yolo/obb/val.py,sha256=XkZhjPqF7bdYotyUTnRCj6Zre6QsB1M3ulZ0DMf-xiE,14513
+ultralytics/models/yolo/obb/predict.py,sha256=I7hWDr1zuy2WuwGom9uzXqomfr7qVMWb7iRl18xdTYw,2577
+ultralytics/models/yolo/obb/train.py,sha256=HEDdPiP-yBbrUQWllcD1rc3gGrbzQmT6RBMTGtmVOu0,3452
+ultralytics/models/yolo/obb/val.py,sha256=qYNe7ZcW3rhTLYPw15OeGfBaqaa_f1ADs4FF21h32e4,14513
 ultralytics/models/yolo/pose/__init__.py,sha256=_9OFLj19XwvJHBRxQtVW5CV7rvJ_3hDPE97miit0sPc,227
-ultralytics/models/yolo/pose/predict.py,sha256=rsorTRpyL-x40R2QVDDG2isc1e2F2lGfD13oKaD5ANs,3118
-ultralytics/models/yolo/pose/train.py,sha256=fy0XE3sC3Ue-kzCyi9rNz3zygMqlfZuZiDFyrD8d6cs,4640
-ultralytics/models/yolo/pose/val.py,sha256=s5WmXcZI5cAi3LPdIVHnkFUbEoFZsw5PBnnLnZ3Ep_c,12004
+ultralytics/models/yolo/pose/predict.py,sha256=6EW9palcAoWX-gu5ROQvO6AxBSm719934hhqF-9OGjM,3118
+ultralytics/models/yolo/pose/train.py,sha256=IlmsFlb0TsWZVy6PL3Trr_aXfwwGMBKAHyxnP7VPp_g,4747
+ultralytics/models/yolo/pose/val.py,sha256=0luDccEPb_lUMjzaBb5VMsh9RdXVAbxb3Br57VKWNdc,12004
 ultralytics/models/yolo/segment/__init__.py,sha256=3IThhZ1wlkY9FvmWm9cE-5-ZyE6F1FgzAtQ6jOOFzzw,275
-ultralytics/models/yolo/segment/predict.py,sha256=dWb39_G5EMl9J6AeO8_u-G2di7PdIDzy9iVwcFv7zvU,5430
-ultralytics/models/yolo/segment/train.py,sha256=aMsQprA1FX28a0T1cWYmbrsMPawJE6SGwX2rgS_Eb_E,3021
-ultralytics/models/yolo/segment/val.py,sha256=XauBfmC-B4ZZQk9qfuI-7tHq1TQ5hemnidlTs4S1WEo,13286
+ultralytics/models/yolo/segment/predict.py,sha256=9CAAjkghFYdGaXYUOcpGZilhwNSnYL2U1Qu_Qc6UtFY,5430
+ultralytics/models/yolo/segment/train.py,sha256=nS3qrT7Y3swCwjGZzeDQ2EunC9ilMsOiWs6LaTUCAE4,3021
+ultralytics/models/yolo/segment/val.py,sha256=EDcwcfwgc9eUKgUL9NIlh_rGhlqcNEAWcRT7KtQj6AQ,13286
 ultralytics/models/yolo/world/__init__.py,sha256=nlh8I6t8hMGz_vZg8QSlsUW1R-2eKvn9CGUoPPQEGhA,131
 ultralytics/models/yolo/world/train.py,sha256=80kswko6Zu7peXPBhXcfrTo5HO3Rg8C_cu4vPBQlk7M,7906
 ultralytics/models/yolo/world/train_world.py,sha256=5Jj4gzEwDJtz37bEahL6Lf4xp-c1xiYjGKeg_w7Esns,8723
@@ -219,34 +219,34 @@ ultralytics/models/yolo/yoloe/train.py,sha256=99iSHQs--5VU_s82Q4w-fAJmyT5-y0TykT
 ultralytics/models/yolo/yoloe/train_seg.py,sha256=rV2Jnbuh6vvBMaupaZK_aRXBMevO0XhN2VUR43ZwlIY,5285
 ultralytics/models/yolo/yoloe/val.py,sha256=utUFWeFKRFWZrPr1y3A8ztbTwdoWMYqzlwBN7CQ0tCA,9418
 ultralytics/nn/__init__.py,sha256=538LZPUKKvc3JCMgiQ4VLGqRN2ZAaVLFcQbeNNHFkEA,545
-ultralytics/nn/autobackend.py,sha256=8OzCzLPr7Ube2KAQJg8VSRlSE149Sq9cIWBpI8KzIlg,45057
-ultralytics/nn/tasks.py,sha256=dbW3Dn87iSjQK6kqp0oY1mVZJg_zVTwWogMspZ2EyqA,72010
+ultralytics/nn/autobackend.py,sha256=ib-4b7nxFpnU7EHZytKrMt4p8la94ZV02_FF8-ifh1c,45057
+ultralytics/nn/tasks.py,sha256=PmlYScI7qTRCmYRR90Mw1QnqeRzvY0ojAMrgStBr11g,72010
 ultralytics/nn/text_model.py,sha256=c--WzxjFEDb7p95u3YGcSsJLjj91zFNqXshij8Evrwg,15291
 ultralytics/nn/modules/__init__.py,sha256=9KyQBxpomp5uJJ1PvMGuOFs2pR3NpqZcFHJlM6Q56c0,3322
 ultralytics/nn/modules/activation.py,sha256=J6n-CJKFK0YbhwcRDqm9zEJM9pSAEycj5quQss_3x6E,2219
 ultralytics/nn/modules/block.py,sha256=9d1eelj3uRnf-HWTHYTjsBqLSpMCrwBQuX52MjeapN4,74499
 ultralytics/nn/modules/conv.py,sha256=9WUlBzHD-wLgz0riLyttzASLIqBtXPK6Jk5EdyIiGCM,21100
-ultralytics/nn/modules/head.py,sha256=faOX-YkBJTdEHrwJhFBH1LYRjwoP9yxy6mQf6EIfV3c,78084
+ultralytics/nn/modules/head.py,sha256=eJvXtr_ONGqQVdtsUpJtslplgVblti5sMxP9nkoSa0Y,78057
 ultralytics/nn/modules/transformer.py,sha256=lAjTH-U8IkBp_1cXSOOFSus9tJf-s8WISKKcXPB84CM,31972
 ultralytics/nn/modules/utils.py,sha256=EyhENse_RESlXjLHAJWvV07_tq1MVMmfzXgPR1fiT9w,6066
 ultralytics/optim/__init__.py,sha256=Sl3Dx2eiaJd_u4VbmqcBqWWDF8FHnO5W0nBEL8_M_C4,130
 ultralytics/optim/muon.py,sha256=Cuak4LOcVVEWIhYm4WzGmww7nhfR1N_uQOpLPX7gV-c,14243
 ultralytics/solutions/__init__.py,sha256=Jj7OcRiYjHH-e104H4xTgjjR5W6aPB4mBRndbaSPmgU,1209
-ultralytics/solutions/ai_gym.py,sha256=ItLE6HYMx6AEgiHEDG1HKDkippnrnycb-79S2g72AYA,5181
+ultralytics/solutions/ai_gym.py,sha256=fq9sIb0RBBvyd7SZShY8TO690lKbpPNOFap4OGi5CI8,5181
 ultralytics/solutions/analytics.py,sha256=UaH-B6h8Ir9l00deRUeAIW6QQTIO_595HTp93sdwteM,12820
-ultralytics/solutions/config.py,sha256=RZMCsnJpoInpADGnuVHTKgH5mKHyDMF4uD4DNZqanpY,5396
+ultralytics/solutions/config.py,sha256=wT_79zyoy_6diG5Iz9JZLzgCuGMaHj770lwRntVuNjQ,5396
 ultralytics/solutions/distance_calculation.py,sha256=RcpRDodEHAJUug9tobtQKt5_bySNA8NMSRiaL347Q1U,5891
-ultralytics/solutions/heatmap.py,sha256=DUyV5UFsOwZ8ArN4BtW8Vm3ps8_VZXc6VP0uiKyGDWY,5481
-ultralytics/solutions/instance_segmentation.py,sha256=eggk1uWCZ-6cp0YfxCGVUwnKS6xqJua946oxafjAXGk,3778
+ultralytics/solutions/heatmap.py,sha256=0f7v-0oAGj4no_h1Ll-BGsTmszSBoQ0tNa4azJYAQQw,5481
+ultralytics/solutions/instance_segmentation.py,sha256=poxfCKl4gm7pHhjwULOeIPIRy9q_wOxqwtnUXXE9NhQ,3778
 ultralytics/solutions/object_blurrer.py,sha256=EZrv3oU68kEaahAxlhk9cF5ZKFtoVaW8bDB4Css9xe0,3981
 ultralytics/solutions/object_counter.py,sha256=OpMSLlenDK-cLvCgCOoKbqMXIZrngyqP8DP6ZeEnWL8,9355
 ultralytics/solutions/object_cropper.py,sha256=WRbrfXAR5aD6PQBqJ-BvcVaiaqta_9YeTlXN2dY274s,3510
-ultralytics/solutions/parking_management.py,sha256=FQKeLEiwnTmRcXqsNOlOt9GTFPjkyvnE5pwwKnneJa4,13770
+ultralytics/solutions/parking_management.py,sha256=Q0fEFKlv6dKKWuw_4jmWaeHQVXGppzuU7Vr_HqVYqHM,13770
 ultralytics/solutions/queue_management.py,sha256=NlVX6PMEaffjoZjfQrVyayaDUdtc0JF8GzTQrZFjpCg,4371
 ultralytics/solutions/region_counter.py,sha256=IAvlFwEYoNftDzfBbdo5MzLwcuidOHW9oTGyRCDzMRc,6025
 ultralytics/solutions/security_alarm.py,sha256=QjUIVBWcy094VTcOkk_zOq3BmKKOeIaHpVi_QMWo_3Q,6293
 ultralytics/solutions/similarity_search.py,sha256=Q2FOBUtEokegiJHlfDbPP0bKxr5F-sHN3-IvskDoe00,9644
-ultralytics/solutions/solutions.py,sha256=pT3uBxs27BdBud0a4URqVxld3DgcOHgRKxmcTQlXyk4,36984
+ultralytics/solutions/solutions.py,sha256=ktLwDhC0y4k2FbNd0sk7Y8GcEvBu9wL3rXyFGwlbnIQ,36984
 ultralytics/solutions/speed_estimation.py,sha256=WrZECxKAq6P4QpeTbhkp3-Rqjnox7tdR25fUxzozlpU,5861
 ultralytics/solutions/streamlit_inference.py,sha256=utJOe0Weu44_ABF9rDnAjwLjKyn3gwfaYaxFfFbx-9c,13060
 ultralytics/solutions/trackzone.py,sha256=oqv-zZL99RVUMcN5ViAPmadzX6QNdAEozYrrg2pqO6k,3903
@@ -256,26 +256,26 @@ ultralytics/trackers/__init__.py,sha256=n3BOO0TR-Sz5ANDYOkKDipM9nSHOePMEwqafbk-Y
 ultralytics/trackers/basetrack.py,sha256=F-EW29F9E8GwXr5vzwLqW2rNwItu4KIx2MKce5pQXxI,4374
 ultralytics/trackers/bot_sort.py,sha256=WImn-BOzGrK9dgMFfMPzKFE5awhXEB2VOi7AbOf_Cdc,11831
 ultralytics/trackers/byte_tracker.py,sha256=Twmbe3EyqnIds211M84vtuuM1WgHXDykjTMeiAJZzC0,21117
-ultralytics/trackers/track.py,sha256=RHgPvx9FNVBL5pUalX2l-jcWrei1UiAXszjeL3V5d-M,4742
+ultralytics/trackers/track.py,sha256=xte5lkVBbOnrZ_tVLsHUmzvtNjbdksTVeSFQtLCLt_M,4742
 ultralytics/trackers/utils/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
 ultralytics/trackers/utils/gmc.py,sha256=cvvhNXOhylVQti4pJQSNPx4yPqhhhw1k2yzY0JFl7Zo,13760
 ultralytics/trackers/utils/kalman_filter.py,sha256=crgysL2bo0v1eTljOlP2YqIJDLBcHjl75MRpbxfaR_M,21514
 ultralytics/trackers/utils/matching.py,sha256=x6uZOIx0O9oVmAcfY6tYMTJQE2cDTUlRR690Y5UkHLs,7129
-ultralytics/utils/__init__.py,sha256=JfvODTB4mG_JOhTeCiPtq0iCEgiCh14hJf195rnOhLQ,55145
+ultralytics/utils/__init__.py,sha256=XLEK_pvptzNWhJaO8x0MWghREIyEDei0LOGnUnmU1Kg,55145
 ultralytics/utils/autobatch.py,sha256=jiE4m_--H9UkXFDm_FqzcZk_hSTCGpS72XdVEKgZwAo,5114
 ultralytics/utils/autodevice.py,sha256=rXlPuo-iX-vZ4BabmMGEGh9Uxpau4R7Zlt1KCo9Xfyc,8892
-ultralytics/utils/benchmarks.py,sha256=KOFm2AZPehrJajbUu6NTdZoVOFjTpLhUUnfL59sC60w,32293
-ultralytics/utils/checks.py,sha256=DheB1ip9ba7ZW_fjPieNdx98vZpwUDbnCKmavAIzJL4,39411
+ultralytics/utils/benchmarks.py,sha256=x0kLvVCRSRx0nEI_suiKM5R-RXLfxhs6pRiA6qwofcg,32293
+ultralytics/utils/checks.py,sha256=NWc0J-Nk4qHSVEXFDWfJkI7IjTNHFXajKjsSodDroBk,39411
 ultralytics/utils/cpu.py,sha256=OksKOlX93AsbSsFuoYvLXRXgpkOibrZSwQyW6lipt4Q,3493
 ultralytics/utils/dist.py,sha256=hOuY1-unhQAY-uWiZw3LWw36d1mqJuYK75NdlwB4oKE,4131
-ultralytics/utils/downloads.py,sha256=IXM9eTJtt5mdV_9Q-3udwWov0JiVzRI-TXHon-U9gPw,23069
+ultralytics/utils/downloads.py,sha256=TWXkYwR5hEpVMWL6fbjdywDmZe02WhyL_8YuLVce-uM,23069
 ultralytics/utils/errors.py,sha256=dUZcTWpbJJHqEuWHM6IbeoJJ4TzA_yHBP8E7tEEpBVs,1388
 ultralytics/utils/events.py,sha256=6vqs_iSxoXIhQ804sOjApNZmXwNW9FUFtjaHPY8ta10,4665
-ultralytics/utils/files.py,sha256=BdaRwEKqzle4glSj8n_jq6bDjTCAs_H1SN06ZOQ9qFU,8190
+ultralytics/utils/files.py,sha256=u7pjz13wgkLSBfe_beeZrzar32_gaJWoIVa3nvY3mh8,8190
 ultralytics/utils/git.py,sha256=UdqeIiiEzg1qkerAZrg5YtTYPuJYwrpxW9N_6Pq6s8U,5501
 ultralytics/utils/instance.py,sha256=11mhefvTI9ftMqSirXuiViAi0Fxlo6v84qvNxfRNUoE,18862
 ultralytics/utils/logger.py,sha256=T5iaNnaqbCvx_FZf1dhVkr5FVxyxb4vO17t4SJfCIhg,19132
-ultralytics/utils/loss.py,sha256=Y0_EiJtfUwu1gvFIrmnmtrQiJ5rUAZZX4r7DhmG2T3c,56522
+ultralytics/utils/loss.py,sha256=pb4NIzG-vz9MvH4EfdPc6hKFAnEIe6E4dhUZPtTXPHc,56559
 ultralytics/utils/metrics.py,sha256=puMGn1LfVIlDvx5K7US4RtK8HYW6cRl9OznfV0nUPvk,69261
 ultralytics/utils/nms.py,sha256=zv1rOzMF6WU8Kdk41VzNf1H1EMt_vZHcbDFbg3mnN2o,14248
 ultralytics/utils/ops.py,sha256=4xqb7kwrAWm8c_zxOWP5JoXozgsA1Slk2s4XFwmEZCs,26089
@@ -285,7 +285,7 @@ ultralytics/utils/tal.py,sha256=vfcfSy78zdtHbGzlvo5UDx-sCwHLRdGBqDO3CX7ZiR0,2418
 ultralytics/utils/torch_utils.py,sha256=dHvLaQopIOr9NcIWkLWPX36f5OAFR4thcqm379Zayfc,40278
 ultralytics/utils/tqdm.py,sha256=f2W608Qpvgu6tFi28qylaZpcRv3IX8wTGY_8lgicaqY,16343
 ultralytics/utils/triton.py,sha256=BQu3CD3OlT76d1OtmnX5slQU37VC1kzRvEtfI2saIQA,5211
-ultralytics/utils/tuner.py,sha256=1PM7G89X95Yfmhskk8LBXU8T-Bfiln1Ajbnz2lkgvAI,7303
+ultralytics/utils/tuner.py,sha256=nRMmnyp0B0gVJzAXcpCxQUnwXjVp0WNiSJwxyR2xvQM,7303
 ultralytics/utils/callbacks/__init__.py,sha256=hzL63Rce6VkZhP4Lcim9LKjadixaQG86nKqPhk7IkS0,242
 ultralytics/utils/callbacks/base.py,sha256=floD31JHqHpiVabQiE76_hzC_j7KjtL4w_czkD1bLKc,6883
 ultralytics/utils/callbacks/clearml.py,sha256=LjfNe4mswceCOpEGVLxqGXjkl_XGbef4awdcp4502RU,5831
@@ -294,17 +294,17 @@ ultralytics/utils/callbacks/dvc.py,sha256=YT0Sa5P8Huj8Fn9jM2P6MYzUY3PIVxsa5BInVi
 ultralytics/utils/callbacks/hub.py,sha256=fVLqqr3ZM6hoYFlVMEeejfq1MWDrkWCskPFOG3HGILQ,4159
 ultralytics/utils/callbacks/mlflow.py,sha256=wCXjQgdufp9LYujqMzLZOmIOur6kvrApHNeo9dA7t_g,5323
 ultralytics/utils/callbacks/neptune.py,sha256=_vt3cMwDHCR-LyT3KtRikGpj6AG11oQ-skUUUUdZ74o,4391
-ultralytics/utils/callbacks/platform.py,sha256=0Em258lYzL0CLRJWt6ut3_AiQNIZ4y-P-CBZqRbwhOg,16020
+ultralytics/utils/callbacks/platform.py,sha256=Ufws7Kp_MHh3jrz-Sx5q1KKQ-l1hoDnLi1_thZJsHPQ,16091
 ultralytics/utils/callbacks/raytune.py,sha256=Y0dFyNZVRuFovSh7nkgUIHTQL3xIXOACElgHuYbg_5I,1278
 ultralytics/utils/callbacks/tensorboard.py,sha256=PTJYvD2gqRUN8xw5VoTjvKnu2adukLfvhMlDgTnTiFU,4952
 ultralytics/utils/callbacks/wb.py,sha256=ghmL3gigOa-z_F54-TzMraKw9MAaYX-Wk4H8dLoRvX8,7705
 ultralytics/utils/export/__init__.py,sha256=Cfh-PwVfTF_lwPp-Ss4wiX4z8Sm1XRPklsqdFfmTZ30,333
 ultralytics/utils/export/engine.py,sha256=QoXPqnmQn6W5TOUAygOtCG63R9ExDG4-Df6X6W-_Mzo,10470
-ultralytics/utils/export/imx.py,sha256=U9CFQJGRSNa5gyrVxW9fEvnhCd6Ut9_mFZZgzhrGhuI,13783
+ultralytics/utils/export/imx.py,sha256=Sj3xKLV6APulltaitauSs3oas_ndbtiHkP-KRI-bxoY,13553
 ultralytics/utils/export/tensorflow.py,sha256=xHEcEM3_VeYctyqkJCpgkqcNie1M8xLqcFKr6uANEEQ,9951
-ultralytics_opencv_headless-8.4.1.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
-ultralytics_opencv_headless-8.4.1.dist-info/METADATA,sha256=q4WjyMDwPv9CLi4hBDkkUVkzYh8fUIy6pqODN6xaNBY,36937
-ultralytics_opencv_headless-8.4.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-ultralytics_opencv_headless-8.4.1.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
-ultralytics_opencv_headless-8.4.1.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
-ultralytics_opencv_headless-8.4.1.dist-info/RECORD,,
+ultralytics_opencv_headless-8.4.3.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ultralytics_opencv_headless-8.4.3.dist-info/METADATA,sha256=4K427q3e_GOdYacXqp96mGJO9CfO_73MyFSZwwioHoY,38981
+ultralytics_opencv_headless-8.4.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ultralytics_opencv_headless-8.4.3.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ultralytics_opencv_headless-8.4.3.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ultralytics_opencv_headless-8.4.3.dist-info/RECORD,,