dgenerate-ultralytics-headless 8.4.1-py3-none-any.whl → 8.4.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dgenerate_ultralytics_headless-8.4.1.dist-info → dgenerate_ultralytics_headless-8.4.4.dist-info}/METADATA +44 -44
- {dgenerate_ultralytics_headless-8.4.1.dist-info → dgenerate_ultralytics_headless-8.4.4.dist-info}/RECORD +54 -54
- tests/test_exports.py +0 -2
- ultralytics/__init__.py +1 -1
- ultralytics/cfg/__init__.py +20 -22
- ultralytics/data/annotator.py +2 -2
- ultralytics/data/converter.py +57 -38
- ultralytics/engine/exporter.py +23 -24
- ultralytics/engine/model.py +33 -33
- ultralytics/engine/predictor.py +17 -17
- ultralytics/engine/results.py +14 -12
- ultralytics/engine/trainer.py +27 -22
- ultralytics/engine/tuner.py +4 -4
- ultralytics/engine/validator.py +16 -16
- ultralytics/models/yolo/classify/predict.py +1 -1
- ultralytics/models/yolo/classify/train.py +1 -1
- ultralytics/models/yolo/classify/val.py +1 -1
- ultralytics/models/yolo/detect/predict.py +2 -2
- ultralytics/models/yolo/detect/train.py +1 -1
- ultralytics/models/yolo/detect/val.py +1 -1
- ultralytics/models/yolo/model.py +7 -7
- ultralytics/models/yolo/obb/predict.py +1 -1
- ultralytics/models/yolo/obb/train.py +2 -2
- ultralytics/models/yolo/obb/val.py +1 -1
- ultralytics/models/yolo/pose/predict.py +1 -1
- ultralytics/models/yolo/pose/train.py +4 -2
- ultralytics/models/yolo/pose/val.py +1 -1
- ultralytics/models/yolo/segment/predict.py +3 -3
- ultralytics/models/yolo/segment/train.py +3 -3
- ultralytics/models/yolo/segment/val.py +2 -4
- ultralytics/nn/autobackend.py +3 -3
- ultralytics/nn/modules/head.py +1 -1
- ultralytics/nn/tasks.py +12 -12
- ultralytics/solutions/ai_gym.py +3 -3
- ultralytics/solutions/config.py +1 -1
- ultralytics/solutions/heatmap.py +1 -1
- ultralytics/solutions/instance_segmentation.py +2 -2
- ultralytics/solutions/parking_management.py +1 -1
- ultralytics/solutions/solutions.py +2 -2
- ultralytics/trackers/track.py +1 -1
- ultralytics/utils/__init__.py +8 -8
- ultralytics/utils/benchmarks.py +25 -25
- ultralytics/utils/callbacks/platform.py +11 -9
- ultralytics/utils/callbacks/tensorboard.py +2 -0
- ultralytics/utils/checks.py +6 -6
- ultralytics/utils/downloads.py +2 -2
- ultralytics/utils/export/imx.py +24 -17
- ultralytics/utils/files.py +2 -2
- ultralytics/utils/loss.py +3 -3
- ultralytics/utils/tuner.py +2 -2
- {dgenerate_ultralytics_headless-8.4.1.dist-info → dgenerate_ultralytics_headless-8.4.4.dist-info}/WHEEL +0 -0
- {dgenerate_ultralytics_headless-8.4.1.dist-info → dgenerate_ultralytics_headless-8.4.4.dist-info}/entry_points.txt +0 -0
- {dgenerate_ultralytics_headless-8.4.1.dist-info → dgenerate_ultralytics_headless-8.4.4.dist-info}/licenses/LICENSE +0 -0
- {dgenerate_ultralytics_headless-8.4.1.dist-info → dgenerate_ultralytics_headless-8.4.4.dist-info}/top_level.txt +0 -0
{dgenerate_ultralytics_headless-8.4.1.dist-info → dgenerate_ultralytics_headless-8.4.4.dist-info}/METADATA
CHANGED

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dgenerate-ultralytics-headless
-Version: 8.4.1
+Version: 8.4.4
 Summary: Automatically built Ultralytics package with python-opencv-headless dependency instead of python-opencv
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -120,7 +120,7 @@ The workflow runs automatically every day at midnight UTC to check for new Ultra
 
 <div align="center">
 <p>
-<a href="https://
+<a href="https://platform.ultralytics.com/ultralytics/yolo26" target="_blank">
 <img width="100%" src="https://raw.githubusercontent.com/ultralytics/assets/main/yolov8/banner-yolov8.png" alt="Ultralytics YOLO banner"></a>
 </p>
 
@@ -147,8 +147,8 @@ Find detailed documentation in the [Ultralytics Docs](https://docs.ultralytics.c
 
 Request an Enterprise License for commercial use at [Ultralytics Licensing](https://www.ultralytics.com/license).
 
-<a href="https://
-<img width="100%" src="https://raw.githubusercontent.com/ultralytics/assets/refs/heads/main/yolo/performance-comparison.png" alt="
+<a href="https://platform.ultralytics.com/ultralytics/yolo26" target="_blank">
+<img width="100%" src="https://raw.githubusercontent.com/ultralytics/assets/refs/heads/main/yolo/performance-comparison.png" alt="YOLO26 performance plots">
 </a>
 
 <div align="center">
@@ -249,13 +249,13 @@ Ultralytics supports a wide range of YOLO models, from early versions like [YOLO
 
 Explore the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usage examples. These models are trained on the [COCO dataset](https://cocodataset.org/), featuring 80 object classes.
 
-| Model | size<br><sup>(pixels) | mAP<sup>val<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
-| ------------------------------------------------------------------------------------ |
-| [YOLO26n](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n.pt) | 640
-| [YOLO26s](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s.pt) | 640
-| [YOLO26m](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m.pt) | 640
-| [YOLO26l](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l.pt) | 640
-| [YOLO26x](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x.pt) | 640
+| Model | size<br><sup>(pixels)</sup> | mAP<sup>val<br>50-95</sup> | mAP<sup>val<br>50-95(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+| --- | --- | --- | --- | --- | --- | --- | --- |
+| [YOLO26n](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n.pt) | 640 | 40.9 | 40.1 | 38.9 ± 0.7 | 1.7 ± 0.0 | 2.4 | 5.4 |
+| [YOLO26s](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s.pt) | 640 | 48.6 | 47.8 | 87.2 ± 0.9 | 2.5 ± 0.0 | 9.5 | 20.7 |
+| [YOLO26m](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m.pt) | 640 | 53.1 | 52.5 | 220.0 ± 1.4 | 4.7 ± 0.1 | 20.4 | 68.2 |
+| [YOLO26l](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l.pt) | 640 | 55.0 | 54.4 | 286.2 ± 2.0 | 6.2 ± 0.2 | 24.8 | 86.4 |
+| [YOLO26x](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x.pt) | 640 | 57.5 | 56.9 | 525.8 ± 4.0 | 11.8 ± 0.2 | 55.7 | 193.9 |
 
 - **mAP<sup>val</sup>** values refer to single-model single-scale performance on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val detect data=coco.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val detect data=coco.yaml batch=1 device=0|cpu`
@@ -266,13 +266,13 @@ Explore the [Detection Docs](https://docs.ultralytics.com/tasks/detect/) for usa
 
 Refer to the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage examples. These models are trained on [COCO-Seg](https://docs.ultralytics.com/datasets/segment/coco/), including 80 classes.
 
-| Model | size<br><sup>(pixels) | mAP<sup>box<br>50-95 | mAP<sup>mask<br>50-95 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
-| -------------------------------------------------------------------------------------------- |
-| [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640
-| [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640
-| [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640
-| [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640
-| [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640
+| Model | size<br><sup>(pixels)</sup> | mAP<sup>box<br>50-95(e2e)</sup> | mAP<sup>mask<br>50-95(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+| --- | --- | --- | --- | --- | --- | --- | --- |
+| [YOLO26n-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-seg.pt) | 640 | 39.6 | 33.9 | 53.3 ± 0.5 | 2.1 ± 0.0 | 2.7 | 9.1 |
+| [YOLO26s-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-seg.pt) | 640 | 47.3 | 40.0 | 118.4 ± 0.9 | 3.3 ± 0.0 | 10.4 | 34.2 |
+| [YOLO26m-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-seg.pt) | 640 | 52.5 | 44.1 | 328.2 ± 2.4 | 6.7 ± 0.1 | 23.6 | 121.5 |
+| [YOLO26l-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-seg.pt) | 640 | 54.4 | 45.5 | 387.0 ± 3.7 | 8.0 ± 0.1 | 28.0 | 139.8 |
+| [YOLO26x-seg](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-seg.pt) | 640 | 56.5 | 47.0 | 787.0 ± 6.8 | 16.4 ± 0.1 | 62.8 | 313.5 |
 
 - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO val2017](https://cocodataset.org/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val segment data=coco.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val segment data=coco.yaml batch=1 device=0|cpu`
@@ -283,13 +283,13 @@ Refer to the [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) fo
 
 Consult the [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usage examples. These models are trained on [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/), covering 1000 classes.
 
-| Model | size<br><sup>(pixels) | acc<br><sup>top1 | acc<br><sup>top5 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) at 224 |
-| -------------------------------------------------------------------------------------------- |
-| [YOLO26n-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-cls.pt) | 224
-| [YOLO26s-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-cls.pt) | 224
-| [YOLO26m-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-cls.pt) | 224
-| [YOLO26l-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-cls.pt) | 224
-| [YOLO26x-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-cls.pt) | 224
+| Model | size<br><sup>(pixels)</sup> | acc<br><sup>top1</sup> | acc<br><sup>top5</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B) at 224</sup> |
+| --- | --- | --- | --- | --- | --- | --- | --- |
+| [YOLO26n-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-cls.pt) | 224 | 71.4 | 90.1 | 5.0 ± 0.3 | 1.1 ± 0.0 | 2.8 | 0.5 |
+| [YOLO26s-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-cls.pt) | 224 | 76.0 | 92.9 | 7.9 ± 0.2 | 1.3 ± 0.0 | 6.7 | 1.6 |
+| [YOLO26m-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-cls.pt) | 224 | 78.1 | 94.2 | 17.2 ± 0.4 | 2.0 ± 0.0 | 11.6 | 4.9 |
+| [YOLO26l-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-cls.pt) | 224 | 79.0 | 94.6 | 23.2 ± 0.3 | 2.8 ± 0.0 | 14.1 | 6.2 |
+| [YOLO26x-cls](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-cls.pt) | 224 | 79.9 | 95.0 | 41.4 ± 0.9 | 3.8 ± 0.0 | 29.6 | 13.6 |
 
 - **acc** values represent model accuracy on the [ImageNet](https://www.image-net.org/) dataset validation set. <br>Reproduce with `yolo val classify data=path/to/ImageNet device=0`
 - **Speed** metrics are averaged over ImageNet val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val classify data=path/to/ImageNet batch=1 device=0|cpu`
@@ -300,13 +300,13 @@ Consult the [Classification Docs](https://docs.ultralytics.com/tasks/classify/)
 
 See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usage examples. These models are trained on [COCO-Pose](https://docs.ultralytics.com/datasets/pose/coco/), focusing on the 'person' class.
 
-| Model | size<br><sup>(pixels) | mAP<sup>pose<br>50-95 | mAP<sup>pose<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
-| ---------------------------------------------------------------------------------------------- |
-| [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640
-| [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640
-| [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640
-| [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640
-| [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640
+| Model | size<br><sup>(pixels)</sup> | mAP<sup>pose<br>50-95(e2e)</sup> | mAP<sup>pose<br>50(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+| --- | --- | --- | --- | --- | --- | --- | --- |
+| [YOLO26n-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-pose.pt) | 640 | 57.2 | 83.3 | 40.3 ± 0.5 | 1.8 ± 0.0 | 2.9 | 7.5 |
+| [YOLO26s-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-pose.pt) | 640 | 63.0 | 86.6 | 85.3 ± 0.9 | 2.7 ± 0.0 | 10.4 | 23.9 |
+| [YOLO26m-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-pose.pt) | 640 | 68.8 | 89.6 | 218.0 ± 1.5 | 5.0 ± 0.1 | 21.5 | 73.1 |
+| [YOLO26l-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-pose.pt) | 640 | 70.4 | 90.5 | 275.4 ± 2.4 | 6.5 ± 0.1 | 25.9 | 91.3 |
+| [YOLO26x-pose](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-pose.pt) | 640 | 71.6 | 91.6 | 565.4 ± 3.0 | 12.2 ± 0.2 | 57.6 | 201.7 |
 
 - **mAP<sup>val</sup>** values are for single-model single-scale on the [COCO Keypoints val2017](https://docs.ultralytics.com/datasets/pose/coco/) dataset. See [YOLO Performance Metrics](https://docs.ultralytics.com/guides/yolo-performance-metrics/) for details. <br>Reproduce with `yolo val pose data=coco-pose.yaml device=0`
 - **Speed** metrics are averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce with `yolo val pose data=coco-pose.yaml batch=1 device=0|cpu`
@@ -317,13 +317,13 @@ See the [Pose Estimation Docs](https://docs.ultralytics.com/tasks/pose/) for usa
 
 Check the [OBB Docs](https://docs.ultralytics.com/tasks/obb/) for usage examples. These models are trained on [DOTAv1](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10/), including 15 classes.
 
-| Model | size<br><sup>(pixels) | mAP<sup>test<br>50 | Speed<br><sup>CPU ONNX<br>(ms) | Speed<br><sup>T4 TensorRT10<br>(ms) | params<br><sup>(M) | FLOPs<br><sup>(B) |
-| -------------------------------------------------------------------------------------------- |
-| [YOLO26n-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-obb.pt) | 1024
-| [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024
-| [YOLO26m-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-obb.pt) | 1024
-| [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024
-| [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024
+| Model | size<br><sup>(pixels)</sup> | mAP<sup>test<br>50-95(e2e)</sup> | mAP<sup>test<br>50(e2e)</sup> | Speed<br><sup>CPU ONNX<br>(ms)</sup> | Speed<br><sup>T4 TensorRT10<br>(ms)</sup> | params<br><sup>(M)</sup> | FLOPs<br><sup>(B)</sup> |
+| --- | --- | --- | --- | --- | --- | --- | --- |
+| [YOLO26n-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26n-obb.pt) | 1024 | 52.4 | 78.9 | 97.7 ± 0.9 | 2.8 ± 0.0 | 2.5 | 14.0 |
+| [YOLO26s-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26s-obb.pt) | 1024 | 54.8 | 80.9 | 218.0 ± 1.4 | 4.9 ± 0.1 | 9.8 | 55.1 |
+| [YOLO26m-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26m-obb.pt) | 1024 | 55.3 | 81.0 | 579.2 ± 3.8 | 10.2 ± 0.3 | 21.2 | 183.3 |
+| [YOLO26l-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26l-obb.pt) | 1024 | 56.2 | 81.6 | 735.6 ± 3.1 | 13.0 ± 0.2 | 25.6 | 230.0 |
+| [YOLO26x-obb](https://github.com/ultralytics/assets/releases/download/v8.4.0/yolo26x-obb.pt) | 1024 | 56.7 | 81.7 | 1485.7 ± 11.5 | 30.5 ± 0.9 | 57.6 | 516.5 |
 
 - **mAP<sup>test</sup>** values are for single-model multiscale performance on the [DOTAv1 test set](https://captain-whu.github.io/DOTA/dataset.html). <br>Reproduce by `yolo val obb data=DOTAv1.yaml device=0 split=test` and submit merged results to the [DOTA evaluation server](https://captain-whu.github.io/DOTA/evaluation.html).
 - **Speed** metrics are averaged over [DOTAv1 val images](https://docs.ultralytics.com/datasets/obb/dota-v2/#dota-v10) using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. CPU speeds measured with [ONNX](https://onnx.ai/) export. GPU speeds measured with [TensorRT](https://developer.nvidia.com/tensorrt) export. <br>Reproduce by `yolo val obb data=DOTAv1.yaml batch=1 device=0|cpu`
@@ -341,8 +341,8 @@ Our key integrations with leading AI platforms extend the functionality of Ultra
 <br>
 
 <div align="center">
-<a href="https://
-<img src="https://github.com/ultralytics/assets/raw/main/partners/logo-ultralytics-hub.png" width="10%" alt="Ultralytics
+<a href="https://platform.ultralytics.com/ultralytics/yolo26">
+<img src="https://github.com/ultralytics/assets/raw/main/partners/logo-ultralytics-hub.png" width="10%" alt="Ultralytics Platform logo"></a>
 <img src="https://github.com/ultralytics/assets/raw/main/social/logo-transparent.png" width="15%" height="0" alt="space">
 <a href="https://docs.ultralytics.com/integrations/weights-biases/">
 <img src="https://github.com/ultralytics/assets/raw/main/partners/logo-wb.png" width="10%" alt="Weights & Biases logo"></a>
@@ -354,9 +354,9 @@ Our key integrations with leading AI platforms extend the functionality of Ultra
 <img src="https://github.com/ultralytics/assets/raw/main/partners/logo-neuralmagic.png" width="10%" alt="Neural Magic logo"></a>
 </div>
 
-
-
-| Streamline YOLO workflows: Label, train, and deploy effortlessly with [Ultralytics
+| Ultralytics Platform 🌟 | Weights & Biases | Comet | Neural Magic |
+| :---: | :---: | :---: | :---: |
+| Streamline YOLO workflows: Label, train, and deploy effortlessly with [Ultralytics Platform](https://platform.ultralytics.com/ultralytics/yolo26). Try now! | Track experiments, hyperparameters, and results with [Weights & Biases](https://docs.ultralytics.com/integrations/weights-biases/). | Free forever, [Comet ML](https://docs.ultralytics.com/integrations/comet/) lets you save YOLO models, resume training, and interactively visualize predictions. | Run YOLO inference up to 6x faster with [Neural Magic DeepSparse](https://docs.ultralytics.com/integrations/neural-magic/). |
 
 ## 🤝 Contribute
 
```
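The benchmark footnotes above give the CLI reproduction commands. For reference, a minimal Python sketch of the same detection validation, assuming the public `ultralytics` YOLO API that this headless package repackages and that the `yolo26n.pt` checkpoint from the table is downloadable:

```python
# Sketch of the "Reproduce with `yolo val detect data=coco.yaml device=0`" footnote,
# assuming the standard ultralytics Python API (not a verbatim excerpt from the package).
from ultralytics import YOLO

model = YOLO("yolo26n.pt")  # checkpoint from the detection table; auto-downloads if missing
metrics = model.val(data="coco.yaml", device=0)  # COCO val2017, single-model single-scale
print(metrics.box.map)  # mAP 50-95, comparable to the table's mAP column
```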
{dgenerate_ultralytics_headless-8.4.1.dist-info → dgenerate_ultralytics_headless-8.4.4.dist-info}/RECORD
CHANGED

```diff
@@ -1,18 +1,18 @@
-dgenerate_ultralytics_headless-8.4.
+dgenerate_ultralytics_headless-8.4.4.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
 tests/__init__.py,sha256=hfUXxYLJB3846OCzWV94ZKEZsi8vq9Pqrdd2mMgjjck,804
 tests/conftest.py,sha256=rlKyDuOC_3ptXrWS8Q19bNEGOupUmYXHj3nB6o1GBGY,2318
 tests/test_cli.py,sha256=GhIFHi-_WIJpDgoGNRi0DnjbfwP1wHbklBMnkCM-P_4,5464
 tests/test_cuda.py,sha256=2TBe-ZkecMOGPWLdHcbsAjH3m9c5SQJ2KeyICgS0aeo,8426
 tests/test_engine.py,sha256=ufSn3X4kL_Lpn2O25jKAfw_9QwHTMRjP9shDdpgBqnY,5740
-tests/test_exports.py,sha256=
+tests/test_exports.py,sha256=Toy4u-4bsoyAbzNhc9kbMuKqvMKywZxNj5jlFNTzFWs,14670
 tests/test_integrations.py,sha256=FjvTGjXm3bvYHK3_obgObhC5SzHCTzw4aOJV9Hh08jQ,6220
 tests/test_python.py,sha256=np6on3Sa0NNi5pquvilekjKxxedAJMpLOQEthGaIalQ,29284
 tests/test_solutions.py,sha256=1tRlM72YciE42Nk9v83gsXOD5RSx9GSWVsKGhH7-HxE,14122
-ultralytics/__init__.py,sha256=
+ultralytics/__init__.py,sha256=sJYUdz1Qx-pwzIz34CD4B1PgspkWiGojpY2uQ6D5lE0,1300
 ultralytics/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
 ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
 ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
-ultralytics/cfg/__init__.py,sha256=
+ultralytics/cfg/__init__.py,sha256=_LkOX0ZG8AlWr_NG2KW7E8SQ7DqVeD_vSiYUd2EKXA4,40288
 ultralytics/cfg/default.yaml,sha256=E__q2msvK9XCQngf0YFLpueCer_1tRcMJM0p3ahBdbA,9015
 ultralytics/cfg/datasets/Argoverse.yaml,sha256=QGpdh3Hj5dFrvbsaE_8rAVj9BO4XpKTB7uhXaTTnE-o,3364
 ultralytics/cfg/datasets/DOTAv1.5.yaml,sha256=KE7VC-ZMDSei1pLPm-pdk_ZAMRU_gLwGgtIQNbwp6dA,1212
@@ -118,11 +118,11 @@ ultralytics/cfg/models/v9/yolov9t.yaml,sha256=Q8GpSXE7fumhuJiQg4a2SkuS_UmnXqp-eo
 ultralytics/cfg/trackers/botsort.yaml,sha256=tRxC-qT4Wz0mLn5x7ZEwrqgGKrmTDVY7gMge-mhpe7U,1431
 ultralytics/cfg/trackers/bytetrack.yaml,sha256=7LS1ObP5u7BUFcmeY6L2m3bRuPUktnpJspFKd_ElVWc,908
 ultralytics/data/__init__.py,sha256=ToR8zl0JhBHy42ZvV7zIwO_F3lbi5oNlGQNPK3dlddU,644
-ultralytics/data/annotator.py,sha256=
+ultralytics/data/annotator.py,sha256=iu1En-LzlR4RyR3ocftthnAog_peQHV9ForPRo_QcX8,2985
 ultralytics/data/augment.py,sha256=4xtggkuysYcbK5pYwNuAaoCzshb5wwD9KN6_pP4uSFU,128003
 ultralytics/data/base.py,sha256=pMs8yJOmAFPXdgfLCDtUemSvkPNDzxReP-fWzkNtonc,19723
 ultralytics/data/build.py,sha256=s-tkSZPf3OfQyfXPXB9XxdW_gIcU6Xy_u21ekSgTnRo,17205
-ultralytics/data/converter.py,sha256=
+ultralytics/data/converter.py,sha256=KUFVQuesnABjm7nW90kxQ6WeYavbo7AC7ZtfuxGvPE4,33107
 ultralytics/data/dataset.py,sha256=r_BZy4FwMZ-dYkaJiz1E3jr2pI6dn7V3hZwf2RM9_RQ,36536
 ultralytics/data/loaders.py,sha256=BQbhgjiLCGcRBPkGVG9Hr1jeNfG1nuZD3jstiWb7zS8,31889
 ultralytics/data/split.py,sha256=HpR0ltf5oN1DpZstavFbBFC1YdpGPaATXxDOcAMwOqc,5101
@@ -133,13 +133,13 @@ ultralytics/data/scripts/get_coco.sh,sha256=UuJpJeo3qQpTHVINeOpmP0NYmg8PhEFE3A8J
 ultralytics/data/scripts/get_coco128.sh,sha256=qmRQl_hOKrsdHrTrnyQuFIH01oDz3lfaz138OgGfLt8,650
 ultralytics/data/scripts/get_imagenet.sh,sha256=hr42H16bM47iT27rgS7MpEo-GeOZAYUQXgr0B2cwn48,1705
 ultralytics/engine/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
-ultralytics/engine/exporter.py,sha256=
-ultralytics/engine/model.py,sha256=
-ultralytics/engine/predictor.py,sha256=
-ultralytics/engine/results.py,sha256=
-ultralytics/engine/trainer.py,sha256=
-ultralytics/engine/tuner.py,sha256=
-ultralytics/engine/validator.py,sha256=
+ultralytics/engine/exporter.py,sha256=n_DtRhD0jT9sTFb8oQ_TYdQYTQJbsQzwqdISwR-mQY4,73330
+ultralytics/engine/model.py,sha256=euDHUy7J5vVBvS_d-KbGZd_0BP5bF6Y3cTQ7VXtwZ4k,53210
+ultralytics/engine/predictor.py,sha256=tXrHSTHJ-rDQ3lrPW9P5_ei_ewTwbY2sji6MExybJ28,22838
+ultralytics/engine/results.py,sha256=uvD7WqaePkuYbcf-iFqh3DIy5_ZSyHeDiKIzY5VjePM,68181
+ultralytics/engine/trainer.py,sha256=lvYPaEkaGXuGnH8j19aMIB2BML3b0LhEqt-HyZ_I6nU,47219
+ultralytics/engine/tuner.py,sha256=F4fyQaC5_GT74TULRO0VhzTv2S_a54cZDc3FjFoqaHE,21840
+ultralytics/engine/validator.py,sha256=DiKsygbNJdRdwXoKoYOJA6bP_T7vMW3Syj_Qc_l7xTM,17761
 ultralytics/hub/__init__.py,sha256=Z0K_E00jzQh90b18q3IDChwVmTvyIYp6C00sCV-n2F8,6709
 ultralytics/hub/auth.py,sha256=ANzCeZA7lUzTWc_sFHbDuuyBh1jLl2sTpHkoUbIkFYE,6254
 ultralytics/hub/session.py,sha256=OzBXAL9R135gRDdfNYUqyiSrxOyaiMFCVYSZua99sF0,18364
@@ -190,27 +190,27 @@ ultralytics/models/utils/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXp
 ultralytics/models/utils/loss.py,sha256=9CcqRXDj5-I-7eZuenInvyoLcPf22Ynf3rUFA5V22bI,21131
 ultralytics/models/utils/ops.py,sha256=z-Ebjv_k14bWOoP6nszDzDBiy3yELcVtbj6M8PsRpvE,15207
 ultralytics/models/yolo/__init__.py,sha256=YD407NDDiyjo0x_MR6usJaTpePKPgsfBUYehlCw7lRs,307
-ultralytics/models/yolo/model.py,sha256=
+ultralytics/models/yolo/model.py,sha256=HXkglzJQqW1x7MJaKavI5aasA-0lSH21Xcv_dac3SFU,18504
 ultralytics/models/yolo/classify/__init__.py,sha256=9--HVaNOfI1K7rn_rRqclL8FUAnpfeBrRqEQIaQw2xM,383
-ultralytics/models/yolo/classify/predict.py,sha256=
-ultralytics/models/yolo/classify/train.py,sha256=
-ultralytics/models/yolo/classify/val.py,sha256=
+ultralytics/models/yolo/classify/predict.py,sha256=HCStYkSqeg32SNTWfr4FDCkUMQ4wnKqceUK3T995us4,4137
+ultralytics/models/yolo/classify/train.py,sha256=41ZxaIJkzkRxfgq6VffFX5Xfsrm9tNv3i3bdtUPAocE,8958
+ultralytics/models/yolo/classify/val.py,sha256=akH2P3nff4oiZtV2toKB3Z9HIbsVcwsb1uvDwhamszw,10503
 ultralytics/models/yolo/detect/__init__.py,sha256=GIRsLYR-kT4JJx7lh4ZZAFGBZj0aebokuU0A7JbjDVA,257
-ultralytics/models/yolo/detect/predict.py,sha256=
-ultralytics/models/yolo/detect/train.py,sha256=
-ultralytics/models/yolo/detect/val.py,sha256=
+ultralytics/models/yolo/detect/predict.py,sha256=2nxlMyw_zVKq1aeJFRTgb4EGL2vOFq4pLT9tArHBfF8,5385
+ultralytics/models/yolo/detect/train.py,sha256=ffM3ULnR9Kbw_1yBq2I6BWa7V124lfQtU0_C_GHhwRI,10519
+ultralytics/models/yolo/detect/val.py,sha256=54AOR6r3istE0pILJ1v4xzPdv7UcvtTEZ6E5OGj3Jgc,22818
 ultralytics/models/yolo/obb/__init__.py,sha256=tQmpG8wVHsajWkZdmD6cjGohJ4ki64iSXQT8JY_dydo,221
-ultralytics/models/yolo/obb/predict.py,sha256=
-ultralytics/models/yolo/obb/train.py,sha256=
-ultralytics/models/yolo/obb/val.py,sha256=
+ultralytics/models/yolo/obb/predict.py,sha256=I7hWDr1zuy2WuwGom9uzXqomfr7qVMWb7iRl18xdTYw,2577
+ultralytics/models/yolo/obb/train.py,sha256=HEDdPiP-yBbrUQWllcD1rc3gGrbzQmT6RBMTGtmVOu0,3452
+ultralytics/models/yolo/obb/val.py,sha256=qYNe7ZcW3rhTLYPw15OeGfBaqaa_f1ADs4FF21h32e4,14513
 ultralytics/models/yolo/pose/__init__.py,sha256=_9OFLj19XwvJHBRxQtVW5CV7rvJ_3hDPE97miit0sPc,227
-ultralytics/models/yolo/pose/predict.py,sha256=
-ultralytics/models/yolo/pose/train.py,sha256=
-ultralytics/models/yolo/pose/val.py,sha256=
+ultralytics/models/yolo/pose/predict.py,sha256=6EW9palcAoWX-gu5ROQvO6AxBSm719934hhqF-9OGjM,3118
+ultralytics/models/yolo/pose/train.py,sha256=IlmsFlb0TsWZVy6PL3Trr_aXfwwGMBKAHyxnP7VPp_g,4747
+ultralytics/models/yolo/pose/val.py,sha256=0luDccEPb_lUMjzaBb5VMsh9RdXVAbxb3Br57VKWNdc,12004
 ultralytics/models/yolo/segment/__init__.py,sha256=3IThhZ1wlkY9FvmWm9cE-5-ZyE6F1FgzAtQ6jOOFzzw,275
-ultralytics/models/yolo/segment/predict.py,sha256=
-ultralytics/models/yolo/segment/train.py,sha256=
-ultralytics/models/yolo/segment/val.py,sha256=
+ultralytics/models/yolo/segment/predict.py,sha256=zLhmSTVEnaUumIX9SbjZH09kr2VrNdYWEss7FvseVuY,5428
+ultralytics/models/yolo/segment/train.py,sha256=nS3qrT7Y3swCwjGZzeDQ2EunC9ilMsOiWs6LaTUCAE4,3021
+ultralytics/models/yolo/segment/val.py,sha256=AvPS4rhV2PFpi0yixUfJhdczXctmZQSKgTjh7qVH0To,13204
 ultralytics/models/yolo/world/__init__.py,sha256=nlh8I6t8hMGz_vZg8QSlsUW1R-2eKvn9CGUoPPQEGhA,131
 ultralytics/models/yolo/world/train.py,sha256=80kswko6Zu7peXPBhXcfrTo5HO3Rg8C_cu4vPBQlk7M,7906
 ultralytics/models/yolo/world/train_world.py,sha256=5Jj4gzEwDJtz37bEahL6Lf4xp-c1xiYjGKeg_w7Esns,8723
@@ -220,34 +220,34 @@ ultralytics/models/yolo/yoloe/train.py,sha256=99iSHQs--5VU_s82Q4w-fAJmyT5-y0TykT
 ultralytics/models/yolo/yoloe/train_seg.py,sha256=rV2Jnbuh6vvBMaupaZK_aRXBMevO0XhN2VUR43ZwlIY,5285
 ultralytics/models/yolo/yoloe/val.py,sha256=utUFWeFKRFWZrPr1y3A8ztbTwdoWMYqzlwBN7CQ0tCA,9418
 ultralytics/nn/__init__.py,sha256=538LZPUKKvc3JCMgiQ4VLGqRN2ZAaVLFcQbeNNHFkEA,545
-ultralytics/nn/autobackend.py,sha256=
-ultralytics/nn/tasks.py,sha256=
+ultralytics/nn/autobackend.py,sha256=MLS68iMNv6U0HyBK8nGjcyLOyImYIGEjP4398KqOkV0,45068
+ultralytics/nn/tasks.py,sha256=PmlYScI7qTRCmYRR90Mw1QnqeRzvY0ojAMrgStBr11g,72010
 ultralytics/nn/text_model.py,sha256=c--WzxjFEDb7p95u3YGcSsJLjj91zFNqXshij8Evrwg,15291
 ultralytics/nn/modules/__init__.py,sha256=9KyQBxpomp5uJJ1PvMGuOFs2pR3NpqZcFHJlM6Q56c0,3322
 ultralytics/nn/modules/activation.py,sha256=J6n-CJKFK0YbhwcRDqm9zEJM9pSAEycj5quQss_3x6E,2219
 ultralytics/nn/modules/block.py,sha256=9d1eelj3uRnf-HWTHYTjsBqLSpMCrwBQuX52MjeapN4,74499
 ultralytics/nn/modules/conv.py,sha256=9WUlBzHD-wLgz0riLyttzASLIqBtXPK6Jk5EdyIiGCM,21100
-ultralytics/nn/modules/head.py,sha256=
+ultralytics/nn/modules/head.py,sha256=eJvXtr_ONGqQVdtsUpJtslplgVblti5sMxP9nkoSa0Y,78057
 ultralytics/nn/modules/transformer.py,sha256=lAjTH-U8IkBp_1cXSOOFSus9tJf-s8WISKKcXPB84CM,31972
 ultralytics/nn/modules/utils.py,sha256=EyhENse_RESlXjLHAJWvV07_tq1MVMmfzXgPR1fiT9w,6066
 ultralytics/optim/__init__.py,sha256=Sl3Dx2eiaJd_u4VbmqcBqWWDF8FHnO5W0nBEL8_M_C4,130
 ultralytics/optim/muon.py,sha256=Cuak4LOcVVEWIhYm4WzGmww7nhfR1N_uQOpLPX7gV-c,14243
 ultralytics/solutions/__init__.py,sha256=Jj7OcRiYjHH-e104H4xTgjjR5W6aPB4mBRndbaSPmgU,1209
-ultralytics/solutions/ai_gym.py,sha256=
+ultralytics/solutions/ai_gym.py,sha256=fq9sIb0RBBvyd7SZShY8TO690lKbpPNOFap4OGi5CI8,5181
 ultralytics/solutions/analytics.py,sha256=UaH-B6h8Ir9l00deRUeAIW6QQTIO_595HTp93sdwteM,12820
-ultralytics/solutions/config.py,sha256=
+ultralytics/solutions/config.py,sha256=wT_79zyoy_6diG5Iz9JZLzgCuGMaHj770lwRntVuNjQ,5396
 ultralytics/solutions/distance_calculation.py,sha256=RcpRDodEHAJUug9tobtQKt5_bySNA8NMSRiaL347Q1U,5891
-ultralytics/solutions/heatmap.py,sha256=
-ultralytics/solutions/instance_segmentation.py,sha256=
+ultralytics/solutions/heatmap.py,sha256=0f7v-0oAGj4no_h1Ll-BGsTmszSBoQ0tNa4azJYAQQw,5481
+ultralytics/solutions/instance_segmentation.py,sha256=poxfCKl4gm7pHhjwULOeIPIRy9q_wOxqwtnUXXE9NhQ,3778
 ultralytics/solutions/object_blurrer.py,sha256=EZrv3oU68kEaahAxlhk9cF5ZKFtoVaW8bDB4Css9xe0,3981
 ultralytics/solutions/object_counter.py,sha256=OpMSLlenDK-cLvCgCOoKbqMXIZrngyqP8DP6ZeEnWL8,9355
 ultralytics/solutions/object_cropper.py,sha256=WRbrfXAR5aD6PQBqJ-BvcVaiaqta_9YeTlXN2dY274s,3510
-ultralytics/solutions/parking_management.py,sha256=
+ultralytics/solutions/parking_management.py,sha256=Q0fEFKlv6dKKWuw_4jmWaeHQVXGppzuU7Vr_HqVYqHM,13770
 ultralytics/solutions/queue_management.py,sha256=NlVX6PMEaffjoZjfQrVyayaDUdtc0JF8GzTQrZFjpCg,4371
 ultralytics/solutions/region_counter.py,sha256=IAvlFwEYoNftDzfBbdo5MzLwcuidOHW9oTGyRCDzMRc,6025
 ultralytics/solutions/security_alarm.py,sha256=QjUIVBWcy094VTcOkk_zOq3BmKKOeIaHpVi_QMWo_3Q,6293
 ultralytics/solutions/similarity_search.py,sha256=Q2FOBUtEokegiJHlfDbPP0bKxr5F-sHN3-IvskDoe00,9644
-ultralytics/solutions/solutions.py,sha256=
+ultralytics/solutions/solutions.py,sha256=ktLwDhC0y4k2FbNd0sk7Y8GcEvBu9wL3rXyFGwlbnIQ,36984
 ultralytics/solutions/speed_estimation.py,sha256=WrZECxKAq6P4QpeTbhkp3-Rqjnox7tdR25fUxzozlpU,5861
 ultralytics/solutions/streamlit_inference.py,sha256=utJOe0Weu44_ABF9rDnAjwLjKyn3gwfaYaxFfFbx-9c,13060
 ultralytics/solutions/trackzone.py,sha256=oqv-zZL99RVUMcN5ViAPmadzX6QNdAEozYrrg2pqO6k,3903
@@ -257,26 +257,26 @@ ultralytics/trackers/__init__.py,sha256=n3BOO0TR-Sz5ANDYOkKDipM9nSHOePMEwqafbk-Y
 ultralytics/trackers/basetrack.py,sha256=F-EW29F9E8GwXr5vzwLqW2rNwItu4KIx2MKce5pQXxI,4374
 ultralytics/trackers/bot_sort.py,sha256=WImn-BOzGrK9dgMFfMPzKFE5awhXEB2VOi7AbOf_Cdc,11831
 ultralytics/trackers/byte_tracker.py,sha256=Twmbe3EyqnIds211M84vtuuM1WgHXDykjTMeiAJZzC0,21117
-ultralytics/trackers/track.py,sha256=
+ultralytics/trackers/track.py,sha256=xte5lkVBbOnrZ_tVLsHUmzvtNjbdksTVeSFQtLCLt_M,4742
 ultralytics/trackers/utils/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
 ultralytics/trackers/utils/gmc.py,sha256=cvvhNXOhylVQti4pJQSNPx4yPqhhhw1k2yzY0JFl7Zo,13760
 ultralytics/trackers/utils/kalman_filter.py,sha256=crgysL2bo0v1eTljOlP2YqIJDLBcHjl75MRpbxfaR_M,21514
 ultralytics/trackers/utils/matching.py,sha256=x6uZOIx0O9oVmAcfY6tYMTJQE2cDTUlRR690Y5UkHLs,7129
-ultralytics/utils/__init__.py,sha256=
+ultralytics/utils/__init__.py,sha256=XLEK_pvptzNWhJaO8x0MWghREIyEDei0LOGnUnmU1Kg,55145
 ultralytics/utils/autobatch.py,sha256=jiE4m_--H9UkXFDm_FqzcZk_hSTCGpS72XdVEKgZwAo,5114
 ultralytics/utils/autodevice.py,sha256=rXlPuo-iX-vZ4BabmMGEGh9Uxpau4R7Zlt1KCo9Xfyc,8892
-ultralytics/utils/benchmarks.py,sha256=
-ultralytics/utils/checks.py,sha256=
+ultralytics/utils/benchmarks.py,sha256=f4RykrjO1oEBxrTbH6qM_9vMxYKXO9F0ruFcM4xKF7A,32293
+ultralytics/utils/checks.py,sha256=NWc0J-Nk4qHSVEXFDWfJkI7IjTNHFXajKjsSodDroBk,39411
 ultralytics/utils/cpu.py,sha256=OksKOlX93AsbSsFuoYvLXRXgpkOibrZSwQyW6lipt4Q,3493
 ultralytics/utils/dist.py,sha256=hOuY1-unhQAY-uWiZw3LWw36d1mqJuYK75NdlwB4oKE,4131
-ultralytics/utils/downloads.py,sha256=
+ultralytics/utils/downloads.py,sha256=TWXkYwR5hEpVMWL6fbjdywDmZe02WhyL_8YuLVce-uM,23069
 ultralytics/utils/errors.py,sha256=dUZcTWpbJJHqEuWHM6IbeoJJ4TzA_yHBP8E7tEEpBVs,1388
 ultralytics/utils/events.py,sha256=6vqs_iSxoXIhQ804sOjApNZmXwNW9FUFtjaHPY8ta10,4665
-ultralytics/utils/files.py,sha256=
+ultralytics/utils/files.py,sha256=u7pjz13wgkLSBfe_beeZrzar32_gaJWoIVa3nvY3mh8,8190
 ultralytics/utils/git.py,sha256=UdqeIiiEzg1qkerAZrg5YtTYPuJYwrpxW9N_6Pq6s8U,5501
 ultralytics/utils/instance.py,sha256=11mhefvTI9ftMqSirXuiViAi0Fxlo6v84qvNxfRNUoE,18862
 ultralytics/utils/logger.py,sha256=T5iaNnaqbCvx_FZf1dhVkr5FVxyxb4vO17t4SJfCIhg,19132
-ultralytics/utils/loss.py,sha256=
+ultralytics/utils/loss.py,sha256=pb4NIzG-vz9MvH4EfdPc6hKFAnEIe6E4dhUZPtTXPHc,56559
 ultralytics/utils/metrics.py,sha256=puMGn1LfVIlDvx5K7US4RtK8HYW6cRl9OznfV0nUPvk,69261
 ultralytics/utils/nms.py,sha256=zv1rOzMF6WU8Kdk41VzNf1H1EMt_vZHcbDFbg3mnN2o,14248
 ultralytics/utils/ops.py,sha256=4xqb7kwrAWm8c_zxOWP5JoXozgsA1Slk2s4XFwmEZCs,26089
@@ -286,7 +286,7 @@ ultralytics/utils/tal.py,sha256=vfcfSy78zdtHbGzlvo5UDx-sCwHLRdGBqDO3CX7ZiR0,2418
 ultralytics/utils/torch_utils.py,sha256=dHvLaQopIOr9NcIWkLWPX36f5OAFR4thcqm379Zayfc,40278
 ultralytics/utils/tqdm.py,sha256=f2W608Qpvgu6tFi28qylaZpcRv3IX8wTGY_8lgicaqY,16343
 ultralytics/utils/triton.py,sha256=BQu3CD3OlT76d1OtmnX5slQU37VC1kzRvEtfI2saIQA,5211
-ultralytics/utils/tuner.py,sha256=
+ultralytics/utils/tuner.py,sha256=nRMmnyp0B0gVJzAXcpCxQUnwXjVp0WNiSJwxyR2xvQM,7303
 ultralytics/utils/callbacks/__init__.py,sha256=hzL63Rce6VkZhP4Lcim9LKjadixaQG86nKqPhk7IkS0,242
 ultralytics/utils/callbacks/base.py,sha256=floD31JHqHpiVabQiE76_hzC_j7KjtL4w_czkD1bLKc,6883
 ultralytics/utils/callbacks/clearml.py,sha256=LjfNe4mswceCOpEGVLxqGXjkl_XGbef4awdcp4502RU,5831
@@ -295,16 +295,16 @@ ultralytics/utils/callbacks/dvc.py,sha256=YT0Sa5P8Huj8Fn9jM2P6MYzUY3PIVxsa5BInVi
 ultralytics/utils/callbacks/hub.py,sha256=fVLqqr3ZM6hoYFlVMEeejfq1MWDrkWCskPFOG3HGILQ,4159
 ultralytics/utils/callbacks/mlflow.py,sha256=wCXjQgdufp9LYujqMzLZOmIOur6kvrApHNeo9dA7t_g,5323
 ultralytics/utils/callbacks/neptune.py,sha256=_vt3cMwDHCR-LyT3KtRikGpj6AG11oQ-skUUUUdZ74o,4391
-ultralytics/utils/callbacks/platform.py,sha256=
+ultralytics/utils/callbacks/platform.py,sha256=Ufws7Kp_MHh3jrz-Sx5q1KKQ-l1hoDnLi1_thZJsHPQ,16091
 ultralytics/utils/callbacks/raytune.py,sha256=Y0dFyNZVRuFovSh7nkgUIHTQL3xIXOACElgHuYbg_5I,1278
-ultralytics/utils/callbacks/tensorboard.py,sha256=
+ultralytics/utils/callbacks/tensorboard.py,sha256=K7b6KtC7rimfzqFu-NDZ_55Tbd7eC6TckqQdTNPuQ6U,5039
 ultralytics/utils/callbacks/wb.py,sha256=ghmL3gigOa-z_F54-TzMraKw9MAaYX-Wk4H8dLoRvX8,7705
 ultralytics/utils/export/__init__.py,sha256=Cfh-PwVfTF_lwPp-Ss4wiX4z8Sm1XRPklsqdFfmTZ30,333
 ultralytics/utils/export/engine.py,sha256=QoXPqnmQn6W5TOUAygOtCG63R9ExDG4-Df6X6W-_Mzo,10470
-ultralytics/utils/export/imx.py,sha256=
+ultralytics/utils/export/imx.py,sha256=VnMDO7c8ezBs91UDoLg9rR0oY8Uc7FujKpbdGxrzV18,13744
 ultralytics/utils/export/tensorflow.py,sha256=xHEcEM3_VeYctyqkJCpgkqcNie1M8xLqcFKr6uANEEQ,9951
-dgenerate_ultralytics_headless-8.4.
-dgenerate_ultralytics_headless-8.4.
-dgenerate_ultralytics_headless-8.4.
-dgenerate_ultralytics_headless-8.4.
-dgenerate_ultralytics_headless-8.4.
+dgenerate_ultralytics_headless-8.4.4.dist-info/METADATA,sha256=wK7cNiOfQHx28uF-HEPMHSoxLT0azRn5P4dNsYFWyq4,40118
+dgenerate_ultralytics_headless-8.4.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+dgenerate_ultralytics_headless-8.4.4.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+dgenerate_ultralytics_headless-8.4.4.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+dgenerate_ultralytics_headless-8.4.4.dist-info/RECORD,,
```
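Each RECORD entry is `path,sha256=<urlsafe-base64 digest>,size`, so the hashes above can be re-checked against an unpacked wheel. A minimal sketch follows; the directory names are illustrative assumptions, not taken from the diff:

```python
# Verify RECORD hashes of an unpacked wheel (standard wheel RECORD format).
import base64
import csv
import hashlib
from pathlib import Path

wheel_root = Path("dgenerate_ultralytics_headless-8.4.4")  # unpacked wheel dir (assumed path)
record = wheel_root / "dgenerate_ultralytics_headless-8.4.4.dist-info" / "RECORD"

with record.open(newline="") as f:
    for path, digest, _size in csv.reader(f):
        if not digest:  # RECORD lists itself without a hash
            continue
        data = (wheel_root / path).read_bytes()
        expected = digest.split("=", 1)[1]
        actual = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
        assert actual == expected, f"hash mismatch for {path}"
```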
tests/test_exports.py
CHANGED
```diff
@@ -240,7 +240,6 @@ def test_export_mnn_matrix(task, int8, half, batch):
 
 
 @pytest.mark.slow
-@pytest.mark.skipif(ARM64, reason="NCNN not supported on ARM64")  # https://github.com/Tencent/ncnn/issues/6509
 @pytest.mark.skipif(not TORCH_2_0, reason="NCNN inference causes segfault on PyTorch<2.0")
 def test_export_ncnn():
     """Test YOLO export to NCNN format."""
@@ -249,7 +248,6 @@ def test_export_ncnn():
 
 
 @pytest.mark.slow
-@pytest.mark.skipif(ARM64, reason="NCNN not supported on ARM64")  # https://github.com/Tencent/ncnn/issues/6509
 @pytest.mark.skipif(not TORCH_2_0, reason="NCNN inference causes segfault on PyTorch<2.0")
 @pytest.mark.parametrize("task, half, batch", list(product(TASKS, [True, False], [1])))
 def test_export_ncnn_matrix(task, half, batch):
```
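The dropped `ARM64` skip means the NCNN export tests are now expected to run on ARM64 hosts too. A minimal sketch of the export path these tests exercise, assuming the public `YOLO.export()` API rather than quoting the test body:

```python
# NCNN export round-trip (sketch; not a verbatim excerpt from tests/test_exports.py).
from ultralytics import YOLO

model = YOLO("yolo26n.pt")
ncnn_path = model.export(format="ncnn", half=True)  # writes a *_ncnn_model directory
results = YOLO(ncnn_path)("https://ultralytics.com/images/bus.jpg")  # reload exported model and run inference
```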
ultralytics/__init__.py
CHANGED
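Per the summary this file changes by a single line, presumably the version string bump. A quick way to confirm which build is installed after upgrading, assuming the package keeps the standard `__version__` attribute:

```python
import ultralytics

print(ultralytics.__version__)  # expected: 8.4.4
```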
ultralytics/cfg/__init__.py
CHANGED
```diff
@@ -90,13 +90,13 @@ SOLUTIONS_HELP_MSG = f"""
         yolo solutions count source="path/to/video.mp4" region="[(20, 400), (1080, 400), (1080, 360), (20, 360)]"
 
     2. Call heatmap solution
-        yolo solutions heatmap colormap=cv2.COLORMAP_PARULA model=
+        yolo solutions heatmap colormap=cv2.COLORMAP_PARULA model=yolo26n.pt
 
     3. Call queue management solution
-        yolo solutions queue region="[(20, 400), (1080, 400), (1080, 360), (20, 360)]" model=
+        yolo solutions queue region="[(20, 400), (1080, 400), (1080, 360), (20, 360)]" model=yolo26n.pt
 
     4. Call workout monitoring solution for push-ups
-        yolo solutions workout model=
+        yolo solutions workout model=yolo26n-pose.pt kpts=[6, 8, 10]
 
     5. Generate analytical graphs
         yolo solutions analytics analytics_type="pie"
@@ -118,16 +118,16 @@ CLI_HELP_MSG = f"""
     See all ARGS at https://docs.ultralytics.com/usage/cfg or with 'yolo cfg'
 
     1. Train a detection model for 10 epochs with an initial learning_rate of 0.01
-        yolo train data=coco8.yaml model=
+        yolo train data=coco8.yaml model=yolo26n.pt epochs=10 lr0=0.01
 
     2. Predict a YouTube video using a pretrained segmentation model at image size 320:
-        yolo predict model=
+        yolo predict model=yolo26n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320
 
     3. Validate a pretrained detection model at batch-size 1 and image size 640:
-        yolo val model=
+        yolo val model=yolo26n.pt data=coco8.yaml batch=1 imgsz=640
 
-    4. Export a
-        yolo export model=
+    4. Export a YOLO26n classification model to ONNX format at image size 224 by 128 (no TASK required)
+        yolo export model=yolo26n-cls.pt format=onnx imgsz=224,128
 
     5. Ultralytics solutions usage
         yolo solutions count or any of {list(SOLUTION_MAP.keys())[1:-1]} source="path/to/video.mp4"
```
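The updated help text switches the example checkpoints to the `yolo26n*` family. For reference, the same commands map onto the Python API; a minimal sketch, assuming the standard `ultralytics.YOLO` interface and that the referenced weights are downloadable:

```python
# Python equivalents of the updated CLI examples (sketch, not quoted from the package).
from ultralytics import YOLO

YOLO("yolo26n.pt").train(data="coco8.yaml", epochs=10, lr0=0.01)           # 1. train
YOLO("yolo26n-seg.pt").predict("https://youtu.be/LNwODJXcvt4", imgsz=320)  # 2. predict
YOLO("yolo26n.pt").val(data="coco8.yaml", batch=1, imgsz=640)              # 3. val
YOLO("yolo26n-cls.pt").export(format="onnx", imgsz=(224, 128))             # 4. export
```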
```diff
@@ -305,8 +305,6 @@ def get_cfg(
     # Merge overrides
     if overrides:
         overrides = cfg2dict(overrides)
-        if "save_dir" not in cfg:
-            overrides.pop("save_dir", None)  # special override keys to ignore
         check_dict_alignment(cfg, overrides)
         cfg = {**cfg, **overrides}  # merge cfg and overrides dicts (prefer overrides)
 
@@ -414,7 +412,7 @@ def get_save_dir(args: SimpleNamespace, name: str | None = None) -> Path:
     nested = args.project and len(Path(args.project).parts) > 1  # e.g. "user/project" or "org\repo"
     project = runs / args.project if nested else args.project or runs
     name = name or args.name or f"{args.mode}"
-    save_dir = increment_path(Path(project) / name, exist_ok=args.exist_ok if RANK in {-1, 0} else True
+    save_dir = increment_path(Path(project) / name, exist_ok=args.exist_ok if RANK in {-1, 0} else True)
 
     return Path(save_dir).resolve()  # resolve to display full path in console
 
@@ -494,7 +492,7 @@ def check_dict_alignment(
     base_keys, custom_keys = (frozenset(x.keys()) for x in (base, custom))
     # Allow 'augmentations' as a valid custom parameter for custom Albumentations transforms
     if allowed_custom_keys is None:
-        allowed_custom_keys = {"augmentations"}
+        allowed_custom_keys = {"augmentations", "save_dir"}
     if mismatched := [k for k in custom_keys if k not in base_keys and k not in allowed_custom_keys]:
         from difflib import get_close_matches
 
```
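Taken together, these hunks change how `save_dir` overrides are handled: `get_cfg` no longer silently pops the key, and `check_dict_alignment` whitelists it. A minimal sketch of what that enables, assuming `get_cfg` keeps the signature shown here:

```python
# "save_dir" passed as an override is now kept rather than dropped (sketch, based on the hunks above).
from ultralytics.cfg import get_cfg

cfg = get_cfg(overrides={"model": "yolo26n.pt", "save_dir": "runs/custom_dir"})
print(cfg.save_dir)  # expected to survive check_dict_alignment as "runs/custom_dir"
```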
```diff
@@ -606,7 +604,7 @@ def handle_yolo_settings(args: list[str]) -> None:
 
     Examples:
         >>> handle_yolo_settings(["reset"])  # Reset YOLO settings
-        >>> handle_yolo_settings(["default_cfg_path=
+        >>> handle_yolo_settings(["default_cfg_path=yolo26n.yaml"])  # Update a specific setting
 
     Notes:
         - If no arguments are provided, the function will display the current settings.
@@ -651,7 +649,7 @@ def handle_yolo_solutions(args: list[str]) -> None:
         >>> handle_yolo_solutions(["analytics", "conf=0.25", "source=path/to/video.mp4"])
 
         Run inference with custom configuration, requires Streamlit version 1.29.0 or higher.
-        >>> handle_yolo_solutions(["inference", "model=
+        >>> handle_yolo_solutions(["inference", "model=yolo26n.pt"])
 
     Notes:
         - Arguments can be provided in the format 'key=value' or as boolean flags
@@ -709,7 +707,7 @@ def handle_yolo_solutions(args: list[str]) -> None:
                 str(ROOT / "solutions/streamlit_inference.py"),
                 "--server.headless",
                 "true",
-                overrides.pop("model", "
+                overrides.pop("model", "yolo26n.pt"),
             ]
         )
     else:
@@ -760,9 +758,9 @@ def parse_key_value_pair(pair: str = "key=value") -> tuple:
         AssertionError: If the value is missing or empty.
 
     Examples:
-        >>> key, value = parse_key_value_pair("model=
+        >>> key, value = parse_key_value_pair("model=yolo26n.pt")
         >>> print(f"Key: {key}, Value: {value}")
-        Key: model, Value:
+        Key: model, Value: yolo26n.pt
 
         >>> key, value = parse_key_value_pair("epochs=100")
         >>> print(f"Key: {key}, Value: {value}")
```
```diff
@@ -834,13 +832,13 @@ def entrypoint(debug: str = "") -> None:
 
     Examples:
         Train a detection model for 10 epochs with an initial learning_rate of 0.01:
-        >>> entrypoint("train data=coco8.yaml model=
+        >>> entrypoint("train data=coco8.yaml model=yolo26n.pt epochs=10 lr0=0.01")
 
         Predict a YouTube video using a pretrained segmentation model at image size 320:
-        >>> entrypoint("predict model=
+        >>> entrypoint("predict model=yolo26n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320")
 
         Validate a pretrained detection model at batch-size 1 and image size 640:
-        >>> entrypoint("val model=
+        >>> entrypoint("val model=yolo26n.pt data=coco8.yaml batch=1 imgsz=640")
 
     Notes:
         - If no arguments are passed, the function will display the usage help message.
@@ -935,7 +933,7 @@ def entrypoint(debug: str = "") -> None:
     # Model
     model = overrides.pop("model", DEFAULT_CFG.model)
     if model is None:
-        model = "
+        model = "yolo26n.pt"
         LOGGER.warning(f"'model' argument is missing. Using default 'model={model}'.")
     overrides["model"] = model
     stem = Path(model).stem.lower()
@@ -1024,5 +1022,5 @@ def copy_default_cfg() -> None:
 
 
 if __name__ == "__main__":
-    # Example: entrypoint(debug='yolo predict model=
+    # Example: entrypoint(debug='yolo predict model=yolo26n.pt')
     entrypoint(debug="")
```
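The `entrypoint` hunk changes the fallback checkpoint used when no `model` argument is given. A small sketch of that behavior, assuming `entrypoint()` keeps accepting a debug command string as in its docstring:

```python
# When "model" is omitted, the CLI entrypoint now falls back to yolo26n.pt (sketch).
from ultralytics.cfg import entrypoint

entrypoint("predict source=https://ultralytics.com/images/bus.jpg")
# logs: "'model' argument is missing. Using default 'model=yolo26n.pt'."
```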