dgenerate-ultralytics-headless 8.3.214__py3-none-any.whl → 8.4.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dgenerate_ultralytics_headless-8.3.214.dist-info → dgenerate_ultralytics_headless-8.4.7.dist-info}/METADATA +64 -74
- dgenerate_ultralytics_headless-8.4.7.dist-info/RECORD +311 -0
- {dgenerate_ultralytics_headless-8.3.214.dist-info → dgenerate_ultralytics_headless-8.4.7.dist-info}/WHEEL +1 -1
- tests/__init__.py +7 -9
- tests/conftest.py +8 -15
- tests/test_cli.py +1 -1
- tests/test_cuda.py +13 -10
- tests/test_engine.py +9 -9
- tests/test_exports.py +65 -13
- tests/test_integrations.py +13 -13
- tests/test_python.py +125 -69
- tests/test_solutions.py +161 -152
- ultralytics/__init__.py +1 -1
- ultralytics/cfg/__init__.py +86 -92
- ultralytics/cfg/datasets/Argoverse.yaml +7 -6
- ultralytics/cfg/datasets/DOTAv1.5.yaml +1 -1
- ultralytics/cfg/datasets/DOTAv1.yaml +1 -1
- ultralytics/cfg/datasets/ImageNet.yaml +1 -1
- ultralytics/cfg/datasets/TT100K.yaml +346 -0
- ultralytics/cfg/datasets/VOC.yaml +15 -16
- ultralytics/cfg/datasets/african-wildlife.yaml +1 -1
- ultralytics/cfg/datasets/coco-pose.yaml +21 -0
- ultralytics/cfg/datasets/coco12-formats.yaml +101 -0
- ultralytics/cfg/datasets/coco128-seg.yaml +1 -1
- ultralytics/cfg/datasets/coco8-pose.yaml +21 -0
- ultralytics/cfg/datasets/dog-pose.yaml +28 -0
- ultralytics/cfg/datasets/dota8-multispectral.yaml +1 -1
- ultralytics/cfg/datasets/dota8.yaml +2 -2
- ultralytics/cfg/datasets/hand-keypoints.yaml +26 -2
- ultralytics/cfg/datasets/kitti.yaml +27 -0
- ultralytics/cfg/datasets/lvis.yaml +5 -5
- ultralytics/cfg/datasets/open-images-v7.yaml +1 -1
- ultralytics/cfg/datasets/tiger-pose.yaml +16 -0
- ultralytics/cfg/datasets/xView.yaml +16 -16
- ultralytics/cfg/default.yaml +4 -2
- ultralytics/cfg/models/11/yolo11-pose.yaml +1 -1
- ultralytics/cfg/models/11/yoloe-11-seg.yaml +2 -2
- ultralytics/cfg/models/11/yoloe-11.yaml +2 -2
- ultralytics/cfg/models/26/yolo26-cls.yaml +33 -0
- ultralytics/cfg/models/26/yolo26-obb.yaml +52 -0
- ultralytics/cfg/models/26/yolo26-p2.yaml +60 -0
- ultralytics/cfg/models/26/yolo26-p6.yaml +62 -0
- ultralytics/cfg/models/26/yolo26-pose.yaml +53 -0
- ultralytics/cfg/models/26/yolo26-seg.yaml +52 -0
- ultralytics/cfg/models/26/yolo26.yaml +52 -0
- ultralytics/cfg/models/26/yoloe-26-seg.yaml +53 -0
- ultralytics/cfg/models/26/yoloe-26.yaml +53 -0
- ultralytics/cfg/models/rt-detr/rtdetr-l.yaml +1 -1
- ultralytics/cfg/models/rt-detr/rtdetr-resnet101.yaml +1 -1
- ultralytics/cfg/models/rt-detr/rtdetr-resnet50.yaml +1 -1
- ultralytics/cfg/models/rt-detr/rtdetr-x.yaml +1 -1
- ultralytics/cfg/models/v10/yolov10b.yaml +2 -2
- ultralytics/cfg/models/v10/yolov10l.yaml +2 -2
- ultralytics/cfg/models/v10/yolov10m.yaml +2 -2
- ultralytics/cfg/models/v10/yolov10n.yaml +2 -2
- ultralytics/cfg/models/v10/yolov10s.yaml +2 -2
- ultralytics/cfg/models/v10/yolov10x.yaml +2 -2
- ultralytics/cfg/models/v3/yolov3-tiny.yaml +1 -1
- ultralytics/cfg/models/v6/yolov6.yaml +1 -1
- ultralytics/cfg/models/v8/yoloe-v8-seg.yaml +9 -6
- ultralytics/cfg/models/v8/yoloe-v8.yaml +9 -6
- ultralytics/cfg/models/v8/yolov8-cls-resnet101.yaml +1 -1
- ultralytics/cfg/models/v8/yolov8-cls-resnet50.yaml +1 -1
- ultralytics/cfg/models/v8/yolov8-ghost-p2.yaml +2 -2
- ultralytics/cfg/models/v8/yolov8-ghost-p6.yaml +2 -2
- ultralytics/cfg/models/v8/yolov8-ghost.yaml +2 -2
- ultralytics/cfg/models/v8/yolov8-obb.yaml +1 -1
- ultralytics/cfg/models/v8/yolov8-p2.yaml +1 -1
- ultralytics/cfg/models/v8/yolov8-pose-p6.yaml +1 -1
- ultralytics/cfg/models/v8/yolov8-rtdetr.yaml +1 -1
- ultralytics/cfg/models/v8/yolov8-seg-p6.yaml +1 -1
- ultralytics/cfg/models/v8/yolov8-world.yaml +1 -1
- ultralytics/cfg/models/v8/yolov8-worldv2.yaml +6 -6
- ultralytics/cfg/models/v9/yolov9s.yaml +1 -1
- ultralytics/data/__init__.py +4 -4
- ultralytics/data/annotator.py +5 -6
- ultralytics/data/augment.py +300 -475
- ultralytics/data/base.py +18 -26
- ultralytics/data/build.py +147 -25
- ultralytics/data/converter.py +108 -87
- ultralytics/data/dataset.py +47 -75
- ultralytics/data/loaders.py +42 -49
- ultralytics/data/split.py +5 -6
- ultralytics/data/split_dota.py +8 -15
- ultralytics/data/utils.py +36 -45
- ultralytics/engine/exporter.py +351 -263
- ultralytics/engine/model.py +186 -225
- ultralytics/engine/predictor.py +45 -54
- ultralytics/engine/results.py +198 -325
- ultralytics/engine/trainer.py +165 -106
- ultralytics/engine/tuner.py +41 -43
- ultralytics/engine/validator.py +55 -38
- ultralytics/hub/__init__.py +16 -19
- ultralytics/hub/auth.py +6 -12
- ultralytics/hub/google/__init__.py +7 -10
- ultralytics/hub/session.py +15 -25
- ultralytics/hub/utils.py +5 -8
- ultralytics/models/__init__.py +1 -1
- ultralytics/models/fastsam/__init__.py +1 -1
- ultralytics/models/fastsam/model.py +8 -10
- ultralytics/models/fastsam/predict.py +18 -30
- ultralytics/models/fastsam/utils.py +1 -2
- ultralytics/models/fastsam/val.py +5 -7
- ultralytics/models/nas/__init__.py +1 -1
- ultralytics/models/nas/model.py +5 -8
- ultralytics/models/nas/predict.py +7 -9
- ultralytics/models/nas/val.py +1 -2
- ultralytics/models/rtdetr/__init__.py +1 -1
- ultralytics/models/rtdetr/model.py +5 -8
- ultralytics/models/rtdetr/predict.py +15 -19
- ultralytics/models/rtdetr/train.py +10 -13
- ultralytics/models/rtdetr/val.py +21 -23
- ultralytics/models/sam/__init__.py +15 -2
- ultralytics/models/sam/amg.py +14 -20
- ultralytics/models/sam/build.py +26 -19
- ultralytics/models/sam/build_sam3.py +377 -0
- ultralytics/models/sam/model.py +29 -32
- ultralytics/models/sam/modules/blocks.py +83 -144
- ultralytics/models/sam/modules/decoders.py +19 -37
- ultralytics/models/sam/modules/encoders.py +44 -101
- ultralytics/models/sam/modules/memory_attention.py +16 -30
- ultralytics/models/sam/modules/sam.py +200 -73
- ultralytics/models/sam/modules/tiny_encoder.py +64 -83
- ultralytics/models/sam/modules/transformer.py +18 -28
- ultralytics/models/sam/modules/utils.py +174 -50
- ultralytics/models/sam/predict.py +2248 -350
- ultralytics/models/sam/sam3/__init__.py +3 -0
- ultralytics/models/sam/sam3/decoder.py +546 -0
- ultralytics/models/sam/sam3/encoder.py +529 -0
- ultralytics/models/sam/sam3/geometry_encoders.py +415 -0
- ultralytics/models/sam/sam3/maskformer_segmentation.py +286 -0
- ultralytics/models/sam/sam3/model_misc.py +199 -0
- ultralytics/models/sam/sam3/necks.py +129 -0
- ultralytics/models/sam/sam3/sam3_image.py +339 -0
- ultralytics/models/sam/sam3/text_encoder_ve.py +307 -0
- ultralytics/models/sam/sam3/vitdet.py +547 -0
- ultralytics/models/sam/sam3/vl_combiner.py +160 -0
- ultralytics/models/utils/loss.py +14 -26
- ultralytics/models/utils/ops.py +13 -17
- ultralytics/models/yolo/__init__.py +1 -1
- ultralytics/models/yolo/classify/predict.py +10 -13
- ultralytics/models/yolo/classify/train.py +12 -33
- ultralytics/models/yolo/classify/val.py +30 -29
- ultralytics/models/yolo/detect/predict.py +9 -12
- ultralytics/models/yolo/detect/train.py +17 -23
- ultralytics/models/yolo/detect/val.py +77 -59
- ultralytics/models/yolo/model.py +43 -60
- ultralytics/models/yolo/obb/predict.py +7 -16
- ultralytics/models/yolo/obb/train.py +14 -17
- ultralytics/models/yolo/obb/val.py +40 -37
- ultralytics/models/yolo/pose/__init__.py +1 -1
- ultralytics/models/yolo/pose/predict.py +7 -22
- ultralytics/models/yolo/pose/train.py +13 -16
- ultralytics/models/yolo/pose/val.py +39 -58
- ultralytics/models/yolo/segment/predict.py +17 -21
- ultralytics/models/yolo/segment/train.py +7 -10
- ultralytics/models/yolo/segment/val.py +95 -47
- ultralytics/models/yolo/world/train.py +8 -14
- ultralytics/models/yolo/world/train_world.py +11 -34
- ultralytics/models/yolo/yoloe/__init__.py +7 -7
- ultralytics/models/yolo/yoloe/predict.py +16 -23
- ultralytics/models/yolo/yoloe/train.py +36 -44
- ultralytics/models/yolo/yoloe/train_seg.py +11 -11
- ultralytics/models/yolo/yoloe/val.py +15 -20
- ultralytics/nn/__init__.py +7 -7
- ultralytics/nn/autobackend.py +159 -85
- ultralytics/nn/modules/__init__.py +68 -60
- ultralytics/nn/modules/activation.py +4 -6
- ultralytics/nn/modules/block.py +260 -224
- ultralytics/nn/modules/conv.py +52 -97
- ultralytics/nn/modules/head.py +831 -299
- ultralytics/nn/modules/transformer.py +76 -88
- ultralytics/nn/modules/utils.py +16 -21
- ultralytics/nn/tasks.py +180 -195
- ultralytics/nn/text_model.py +45 -69
- ultralytics/optim/__init__.py +5 -0
- ultralytics/optim/muon.py +338 -0
- ultralytics/solutions/__init__.py +12 -12
- ultralytics/solutions/ai_gym.py +13 -19
- ultralytics/solutions/analytics.py +15 -16
- ultralytics/solutions/config.py +6 -7
- ultralytics/solutions/distance_calculation.py +10 -13
- ultralytics/solutions/heatmap.py +8 -14
- ultralytics/solutions/instance_segmentation.py +6 -9
- ultralytics/solutions/object_blurrer.py +7 -10
- ultralytics/solutions/object_counter.py +12 -19
- ultralytics/solutions/object_cropper.py +8 -14
- ultralytics/solutions/parking_management.py +34 -32
- ultralytics/solutions/queue_management.py +10 -12
- ultralytics/solutions/region_counter.py +9 -12
- ultralytics/solutions/security_alarm.py +15 -20
- ultralytics/solutions/similarity_search.py +10 -15
- ultralytics/solutions/solutions.py +77 -76
- ultralytics/solutions/speed_estimation.py +7 -10
- ultralytics/solutions/streamlit_inference.py +2 -4
- ultralytics/solutions/templates/similarity-search.html +7 -18
- ultralytics/solutions/trackzone.py +7 -10
- ultralytics/solutions/vision_eye.py +5 -8
- ultralytics/trackers/__init__.py +1 -1
- ultralytics/trackers/basetrack.py +3 -5
- ultralytics/trackers/bot_sort.py +10 -27
- ultralytics/trackers/byte_tracker.py +21 -37
- ultralytics/trackers/track.py +4 -7
- ultralytics/trackers/utils/gmc.py +11 -22
- ultralytics/trackers/utils/kalman_filter.py +37 -48
- ultralytics/trackers/utils/matching.py +12 -15
- ultralytics/utils/__init__.py +124 -124
- ultralytics/utils/autobatch.py +2 -4
- ultralytics/utils/autodevice.py +17 -18
- ultralytics/utils/benchmarks.py +57 -71
- ultralytics/utils/callbacks/base.py +8 -10
- ultralytics/utils/callbacks/clearml.py +5 -13
- ultralytics/utils/callbacks/comet.py +32 -46
- ultralytics/utils/callbacks/dvc.py +13 -18
- ultralytics/utils/callbacks/mlflow.py +4 -5
- ultralytics/utils/callbacks/neptune.py +7 -15
- ultralytics/utils/callbacks/platform.py +423 -38
- ultralytics/utils/callbacks/raytune.py +3 -4
- ultralytics/utils/callbacks/tensorboard.py +25 -31
- ultralytics/utils/callbacks/wb.py +16 -14
- ultralytics/utils/checks.py +127 -85
- ultralytics/utils/cpu.py +3 -8
- ultralytics/utils/dist.py +9 -12
- ultralytics/utils/downloads.py +25 -33
- ultralytics/utils/errors.py +6 -14
- ultralytics/utils/events.py +2 -4
- ultralytics/utils/export/__init__.py +4 -236
- ultralytics/utils/export/engine.py +246 -0
- ultralytics/utils/export/imx.py +117 -63
- ultralytics/utils/export/tensorflow.py +231 -0
- ultralytics/utils/files.py +26 -30
- ultralytics/utils/git.py +9 -11
- ultralytics/utils/instance.py +30 -51
- ultralytics/utils/logger.py +212 -114
- ultralytics/utils/loss.py +601 -215
- ultralytics/utils/metrics.py +128 -156
- ultralytics/utils/nms.py +13 -16
- ultralytics/utils/ops.py +117 -166
- ultralytics/utils/patches.py +75 -21
- ultralytics/utils/plotting.py +75 -80
- ultralytics/utils/tal.py +125 -59
- ultralytics/utils/torch_utils.py +53 -79
- ultralytics/utils/tqdm.py +24 -21
- ultralytics/utils/triton.py +13 -19
- ultralytics/utils/tuner.py +19 -10
- dgenerate_ultralytics_headless-8.3.214.dist-info/RECORD +0 -283
- {dgenerate_ultralytics_headless-8.3.214.dist-info → dgenerate_ultralytics_headless-8.4.7.dist-info}/entry_points.txt +0 -0
- {dgenerate_ultralytics_headless-8.3.214.dist-info → dgenerate_ultralytics_headless-8.4.7.dist-info}/licenses/LICENSE +0 -0
- {dgenerate_ultralytics_headless-8.3.214.dist-info → dgenerate_ultralytics_headless-8.4.7.dist-info}/top_level.txt +0 -0
ultralytics/utils/callbacks/wb.py
CHANGED
@@ -16,8 +16,7 @@ except (ImportError, AssertionError):
 
 
 def _custom_table(x, y, classes, title="Precision Recall Curve", x_title="Recall", y_title="Precision"):
-    """
-    Create and log a custom metric visualization to wandb.plot.pr_curve.
+    """Create and log a custom metric visualization to wandb.plot.pr_curve.
 
     This function crafts a custom metric visualization that mimics the behavior of the default wandb precision-recall
     curve while allowing for enhanced customization. The visual metric is useful for monitoring model performance across
@@ -61,11 +60,10 @@ def _plot_curve(
     num_x=100,
     only_mean=False,
 ):
-    """
-    Log a metric curve visualization.
+    """Log a metric curve visualization.
 
-    This function generates a metric curve based on input data and logs the visualization to wandb.
-
+    This function generates a metric curve based on input data and logs the visualization to wandb. The curve can
+    represent aggregated data (mean) or individual class data, depending on the 'only_mean' flag.
 
     Args:
         x (np.ndarray): Data points for the x-axis with length N.
@@ -105,15 +103,14 @@ def _plot_curve(
 
 
 def _log_plots(plots, step):
-    """
-    Log plots to WandB at a specific step if they haven't been logged already.
+    """Log plots to WandB at a specific step if they haven't been logged already.
 
-    This function checks each plot in the input dictionary against previously processed plots and logs
-
+    This function checks each plot in the input dictionary against previously processed plots and logs new or updated
+    plots to WandB at the specified step.
 
     Args:
-        plots (dict): Dictionary of plots to log, where keys are plot names and values are dictionaries
-
+        plots (dict): Dictionary of plots to log, where keys are plot names and values are dictionaries containing plot
+            metadata including timestamps.
         step (int): The step/epoch at which to log the plots in the WandB run.
 
     Notes:
@@ -131,20 +128,25 @@ def _log_plots(plots, step):
 def on_pretrain_routine_start(trainer):
     """Initialize and start wandb project if module is present."""
     if not wb.run:
+        from datetime import datetime
+
+        name = str(trainer.args.name).replace("/", "-").replace(" ", "_")
         wb.init(
             project=str(trainer.args.project).replace("/", "-") if trainer.args.project else "Ultralytics",
-            name=
+            name=name,
             config=vars(trainer.args),
+            id=f"{name}_{datetime.now().strftime('%Y%m%d_%H%M%S')}",  # add unique id
+            dir=str(trainer.save_dir),
         )
 
 
 def on_fit_epoch_end(trainer):
     """Log training metrics and model information at the end of an epoch."""
-    wb.run.log(trainer.metrics, step=trainer.epoch + 1)
     _log_plots(trainer.plots, step=trainer.epoch + 1)
     _log_plots(trainer.validator.plots, step=trainer.epoch + 1)
     if trainer.epoch == 0:
         wb.run.log(model_info_for_loggers(trainer), step=trainer.epoch + 1)
+    wb.run.log(trainer.metrics, step=trainer.epoch + 1, commit=True)  # commit forces sync
 
 
 def on_train_epoch_end(trainer):
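The wb.py hunk above now derives a sanitized run name and a timestamped run id before calling wb.init. A minimal standard-library sketch of that naming scheme, with a hypothetical dict standing in for trainer.args:

```python
# Sketch of the run-naming logic added in the wb.py hunk above.
# "trainer_args" is a hypothetical stand-in for trainer.args; only the stdlib is used.
from datetime import datetime

trainer_args = {"project": "runs/detect", "name": "train 3"}  # example values, not from the diff

name = str(trainer_args["name"]).replace("/", "-").replace(" ", "_")
project = str(trainer_args["project"]).replace("/", "-") if trainer_args["project"] else "Ultralytics"
run_id = f"{name}_{datetime.now().strftime('%Y%m%d_%H%M%S')}"  # unique id per launch

print(project, name, run_id)  # e.g. runs-detect train_3 train_3_20250101_120000
```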
ultralytics/utils/checks.py
CHANGED
@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+import ast
 import functools
 import glob
 import inspect
@@ -11,6 +12,7 @@ import platform
 import re
 import shutil
 import subprocess
+import sys
 import time
 from importlib import metadata
 from pathlib import Path
@@ -53,8 +55,7 @@ from ultralytics.utils import (
 
 
 def parse_requirements(file_path=ROOT.parent / "requirements.txt", package=""):
-    """
-    Parse a requirements.txt file, ignoring lines that start with '#' and any text after '#'.
+    """Parse a requirements.txt file, ignoring lines that start with '#' and any text after '#'.
 
     Args:
         file_path (Path): Path to the requirements.txt file.
@@ -86,8 +87,7 @@ def parse_requirements(file_path=ROOT.parent / "requirements.txt", package=""):
 
 @functools.lru_cache
 def parse_version(version="0.0.0") -> tuple:
-    """
-    Convert a version string to a tuple of integers, ignoring any extra non-numeric string attached to the version.
+    """Convert a version string to a tuple of integers, ignoring any extra non-numeric string attached to the version.
 
     Args:
         version (str): Version string, i.e. '2.0.1+cpu'
@@ -103,8 +103,7 @@ def parse_version(version="0.0.0") -> tuple:
 
 
 def is_ascii(s) -> bool:
-    """
-    Check if a string is composed of only ASCII characters.
+    """Check if a string is composed of only ASCII characters.
 
     Args:
         s (str | list | tuple | dict): Input to be checked (all are converted to string for checking).
@@ -116,8 +115,7 @@ def is_ascii(s) -> bool:
 
 
 def check_imgsz(imgsz, stride=32, min_dim=1, max_dim=2, floor=0):
-    """
-    Verify image size is a multiple of the given stride in each dimension. If the image size is not a multiple of the
+    """Verify image size is a multiple of the given stride in each dimension. If the image size is not a multiple of the
     stride, update it to the nearest multiple of the stride that is greater than or equal to the given floor value.
 
     Args:
@@ -139,7 +137,7 @@ def check_imgsz(imgsz, stride=32, min_dim=1, max_dim=2, floor=0):
     elif isinstance(imgsz, (list, tuple)):
         imgsz = list(imgsz)
     elif isinstance(imgsz, str):  # i.e. '640' or '[640,640]'
-        imgsz = [int(imgsz)] if imgsz.isnumeric() else
+        imgsz = [int(imgsz)] if imgsz.isnumeric() else ast.literal_eval(imgsz)
     else:
         raise TypeError(
             f"'imgsz={imgsz}' is of invalid type {type(imgsz).__name__}. "
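The check_imgsz hunk above switches the string branch to ast.literal_eval, which turns '[640,640]' into a Python list without evaluating arbitrary code. A small standard-library illustration of the new parsing behaviour:

```python
# Illustrates the parsing introduced in the check_imgsz hunk above (stdlib only).
import ast

for imgsz in ("640", "[640,640]"):
    parsed = [int(imgsz)] if imgsz.isnumeric() else ast.literal_eval(imgsz)
    print(imgsz, "->", parsed)  # '640' -> [640], '[640,640]' -> [640, 640]
```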
@@ -187,8 +185,7 @@ def check_version(
     verbose: bool = False,
     msg: str = "",
 ) -> bool:
-    """
-    Check current version against the required version or range.
+    """Check current version against the required version or range.
 
     Args:
         current (str): Current version or package name to get version from.
@@ -268,8 +265,7 @@ def check_version(
 
 
 def check_latest_pypi_version(package_name="ultralytics"):
-    """
-    Return the latest version of a PyPI package without downloading or installing it.
+    """Return the latest version of a PyPI package without downloading or installing it.
 
     Args:
         package_name (str): The name of the package to find the latest version for.
@@ -289,8 +285,7 @@ def check_latest_pypi_version(package_name="ultralytics"):
 
 
 def check_pip_update_available():
-    """
-    Check if a new version of the ultralytics package is available on PyPI.
+    """Check if a new version of the ultralytics package is available on PyPI.
 
     Returns:
         (bool): True if an update is available, False otherwise.
@@ -314,8 +309,7 @@ def check_pip_update_available():
 @ThreadingLocked()
 @functools.lru_cache
 def check_font(font="Arial.ttf"):
-    """
-    Find font locally or download to user's configuration directory if it does not already exist.
+    """Find font locally or download to user's configuration directory if it does not already exist.
 
     Args:
         font (str): Path or name of font.
@@ -344,8 +338,7 @@ def check_font(font="Arial.ttf"):
 
 
 def check_python(minimum: str = "3.8.0", hard: bool = True, verbose: bool = False) -> bool:
-    """
-    Check current python version against the required minimum version.
+    """Check current python version against the required minimum version.
 
     Args:
         minimum (str): Required minimum version of python.
@@ -359,14 +352,53 @@ def check_python(minimum: str = "3.8.0", hard: bool = True, verbose: bool = False) -> bool:
 
 
 @TryExcept()
-def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=(), install=True, cmds=""):
+def check_apt_requirements(requirements):
+    """Check if apt packages are installed and install missing ones.
+
+    Args:
+        requirements: List of apt package names to check and install
     """
-
+    prefix = colorstr("red", "bold", "apt requirements:")
+    # Check which packages are missing
+    missing_packages = []
+    for package in requirements:
+        try:
+            # Use dpkg -l to check if package is installed
+            result = subprocess.run(["dpkg", "-l", package], capture_output=True, text=True, check=False)
+            # Check if package is installed (look for "ii" status)
+            if result.returncode != 0 or not any(
+                line.startswith("ii") and package in line for line in result.stdout.splitlines()
+            ):
+                missing_packages.append(package)
+        except Exception:
+            # If check fails, assume package is not installed
+            missing_packages.append(package)
+
+    # Install missing packages if any
+    if missing_packages:
+        LOGGER.info(
+            f"{prefix} Ultralytics requirement{'s' * (len(missing_packages) > 1)} {missing_packages} not found, attempting AutoUpdate..."
+        )
+        # Optionally update package list first
+        cmd = (["sudo"] if is_sudo_available() else []) + ["apt", "update"]
+        result = subprocess.run(cmd, check=True, capture_output=True, text=True)
+
+        # Build and run the install command
+        cmd = (["sudo"] if is_sudo_available() else []) + ["apt", "install", "-y"] + missing_packages
+        result = subprocess.run(cmd, check=True, capture_output=True, text=True)
+
+        LOGGER.info(f"{prefix} AutoUpdate success ✅")
+        LOGGER.warning(f"{prefix} {colorstr('bold', 'Restart runtime or rerun command for updates to take effect')}\n")
+
+
+@TryExcept()
+def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=(), install=True, cmds=""):
+    """Check if installed dependencies meet Ultralytics YOLO models requirements and attempt to auto-update if needed.
 
     Args:
         requirements (Path | str | list[str|tuple] | tuple[str]): Path to a requirements.txt file, a single package
-            requirement as a string, a list of package requirements as strings, or a list containing strings and
-
+            requirement as a string, a list of package requirements as strings, or a list containing strings and tuples
+            of interchangeable packages.
         exclude (tuple): Tuple of package names to exclude from checking.
         install (bool): If True, attempt to auto-update packages that don't meet requirements.
         cmds (str): Additional commands to pass to the pip install command when auto-updating.
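The new check_apt_requirements helper above decides whether a package is missing by parsing dpkg -l output for an 'ii' (installed) status line. A self-contained sketch of that presence test for Debian/Ubuntu systems; the package names below are hypothetical examples:

```python
# Minimal sketch of the dpkg-based presence test used by check_apt_requirements above.
import subprocess


def is_apt_package_installed(package: str) -> bool:
    """Return True if dpkg reports the package with an 'ii' (installed) status line."""
    result = subprocess.run(["dpkg", "-l", package], capture_output=True, text=True, check=False)
    return result.returncode == 0 and any(
        line.startswith("ii") and package in line for line in result.stdout.splitlines()
    )


missing = [p for p in ("libgl1", "libglib2.0-0") if not is_apt_package_installed(p)]
print("missing apt packages:", missing)
```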
@@ -387,6 +419,11 @@ def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=()
         >>> check_requirements([("onnxruntime", "onnxruntime-gpu"), "numpy"])
     """
     prefix = colorstr("red", "bold", "requirements:")
+
+    if os.environ.get("ULTRALYTICS_SKIP_REQUIREMENTS_CHECKS", "0") == "1":
+        LOGGER.info(f"{prefix} ULTRALYTICS_SKIP_REQUIREMENTS_CHECKS=1 detected, skipping requirements check.")
+        return True
+
     if isinstance(requirements, Path):  # requirements.txt file
         file = requirements.resolve()
         assert file.exists(), f"{prefix} {file} not found, check failed."
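Based on the hunk above, setting ULTRALYTICS_SKIP_REQUIREMENTS_CHECKS=1 makes check_requirements return True immediately, which can help in locked-down or offline environments. A usage sketch; the requirement string is just an example:

```python
# Usage sketch for the opt-out added above; set the variable before the check runs.
import os

os.environ["ULTRALYTICS_SKIP_REQUIREMENTS_CHECKS"] = "1"

from ultralytics.utils.checks import check_requirements

print(check_requirements("onnx>=1.12.0"))  # True, with a log line noting the skip
```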
@@ -417,22 +454,18 @@ def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=()
     def attempt_install(packages, commands, use_uv):
         """Attempt package installation with uv if available, falling back to pip."""
         if use_uv:
-
-
-
+            # Use --python to explicitly target current interpreter (venv or system)
+            # This ensures correct installation when VIRTUAL_ENV env var isn't set
+            return subprocess.check_output(
+                f'uv pip install --no-cache-dir --python "{sys.executable}" {packages} {commands} '
+                f"--index-strategy=unsafe-best-match --break-system-packages",
+                shell=True,
+                stderr=subprocess.STDOUT,
+                text=True,
             )
-
-
-
-            if e.stderr and "No virtual environment found" in e.stderr:
-                return subprocess.check_output(
-                    base.replace("uv pip install", "uv pip install --system"),
-                    shell=True,
-                    stderr=subprocess.PIPE,
-                    text=True,
-                )
-            raise
-        return subprocess.check_output(f"pip install --no-cache-dir {packages} {commands}", shell=True, text=True)
+        return subprocess.check_output(
+            f"pip install --no-cache-dir {packages} {commands}", shell=True, stderr=subprocess.STDOUT, text=True
+        )
 
     s = " ".join(f'"{x}"' for x in pkgs)  # console string
     if s:
@@ -443,14 +476,18 @@ def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=()
         try:
             t = time.time()
             assert ONLINE, "AutoUpdate skipped (offline)"
-
+            use_uv = not ARM64 and check_uv()  # uv fails on ARM64
+            LOGGER.info(attempt_install(s, cmds, use_uv=use_uv))
             dt = time.time() - t
             LOGGER.info(f"{prefix} AutoUpdate success ✅ {dt:.1f}s")
             LOGGER.warning(
                 f"{prefix} {colorstr('bold', 'Restart runtime or rerun command for updates to take effect')}\n"
             )
         except Exception as e:
-
+            msg = f"{prefix} ❌ {e}"
+            if hasattr(e, "output") and e.output:
+                msg += f"\n{e.output}"
+            LOGGER.warning(msg)
             return False
     else:
         return False
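The attempt_install rewrite above builds a single uv command that targets the current interpreter via --python, instead of probing for a virtual environment and retrying with --system. A sketch that only assembles the command strings (nothing is executed); the package list is hypothetical:

```python
# Assembles the uv and pip fallback commands the way the hunk above does; nothing is run here.
import sys

packages = '"onnx>=1.12.0" "onnxslim"'  # hypothetical resolved package list
cmds = ""  # extra arguments passed through from check_requirements(cmds=...)

uv_cmd = (
    f'uv pip install --no-cache-dir --python "{sys.executable}" {packages} {cmds} '
    f"--index-strategy=unsafe-best-match --break-system-packages"
)
pip_cmd = f"pip install --no-cache-dir {packages} {cmds}"  # fallback when uv is unavailable
print(uv_cmd)
print(pip_cmd)
```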
@@ -459,8 +496,7 @@ def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=()
 
 
 def check_torchvision():
-    """
-    Check the installed versions of PyTorch and Torchvision to ensure they're compatible.
+    """Check the installed versions of PyTorch and Torchvision to ensure they're compatible.
 
     This function checks the installed versions of PyTorch and Torchvision, and warns if they're incompatible according
     to the compatibility table based on: https://github.com/pytorch/vision#installation.
@@ -494,9 +530,8 @@ def check_torchvision():
         )
 
 
-def check_suffix(file="yolo11n.pt", suffix=".pt", msg=""):
-    """
-    Check file(s) for acceptable suffix.
+def check_suffix(file="yolo26n.pt", suffix=".pt", msg=""):
+    """Check file(s) for acceptable suffix.
 
     Args:
         file (str | list[str]): File or list of files to check.
@@ -512,8 +547,7 @@ def check_suffix(file="yolo11n.pt", suffix=".pt", msg=""):
 
 
 def check_yolov5u_filename(file: str, verbose: bool = True):
-    """
-    Replace legacy YOLOv5 filenames with updated YOLOv5u filenames.
+    """Replace legacy YOLOv5 filenames with updated YOLOv5u filenames.
 
     Args:
         file (str): Filename to check and potentially update.
@@ -540,8 +574,7 @@ def check_yolov5u_filename(file: str, verbose: bool = True):
 
 
 def check_model_file_from_stem(model="yolo11n"):
-    """
-    Return a model filename from a valid model stem.
+    """Return a model filename from a valid model stem.
 
     Args:
         model (str): Model stem to check.
@@ -551,16 +584,15 @@ def check_model_file_from_stem(model="yolo11n"):
     """
     path = Path(model)
     if not path.suffix and path.stem in downloads.GITHUB_ASSETS_STEMS:
-        return path.with_suffix(".pt")  # add suffix, i.e.
+        return path.with_suffix(".pt")  # add suffix, i.e. yolo26n -> yolo26n.pt
     return model
 
 
 def check_file(file, suffix="", download=True, download_dir=".", hard=True):
-    """
-    Search/download file (if necessary), check suffix (if provided), and return path.
+    """Search/download file (if necessary), check suffix (if provided), and return path.
 
     Args:
-        file (str): File name or path.
+        file (str): File name or path, URL, platform URI (ul://), or GCS path (gs://).
         suffix (str | tuple): Acceptable suffix or tuple of suffixes to validate against the file.
         download (bool): Whether to download the file if it doesn't exist locally.
         download_dir (str): Directory to download the file to.
@@ -578,7 +610,26 @@ def check_file(file, suffix="", download=True, download_dir=".", hard=True):
         or file.lower().startswith("grpc://")
     ):  # file exists or gRPC Triton images
         return file
-    elif download and file.lower().startswith(
+    elif download and file.lower().startswith("ul://"):  # Ultralytics Platform URI
+        from ultralytics.utils.callbacks.platform import resolve_platform_uri
+
+        url = resolve_platform_uri(file, hard=hard)  # Convert to signed HTTPS URL
+        if url is None:
+            return []  # Not found, soft fail (consistent with file search behavior)
+        # Use URI path for unique directory structure: ul://user/project/model -> user/project/model/filename.pt
+        uri_path = file[5:]  # Remove "ul://"
+        local_file = Path(download_dir) / uri_path / url2file(url)
+        if local_file.exists():
+            LOGGER.info(f"Found {clean_url(url)} locally at {local_file}")
+        else:
+            local_file.parent.mkdir(parents=True, exist_ok=True)
+            downloads.safe_download(url=url, file=local_file, unzip=False)
+        return str(local_file)
+    elif download and file.lower().startswith(
+        ("https://", "http://", "rtsp://", "rtmp://", "tcp://", "gs://")
+    ):  # download
+        if file.startswith("gs://"):
+            file = "https://storage.googleapis.com/" + file[5:]  # convert gs:// to public HTTPS URL
         url = file  # warning: Pathlib turns :// -> :/
         file = Path(download_dir) / url2file(file)  # '%2F' to '/', split https://url.com/file.txt?auth
         if file.exists():
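The check_file hunk above adds two new sources: ul:// platform URIs resolved to signed URLs, and gs:// paths rewritten to public storage.googleapis.com URLs before download. The gs:// rewrite is a plain string substitution; the bucket and object names below are hypothetical:

```python
# The gs:// rewrite from the hunk above, shown in isolation.
file = "gs://my-bucket/weights/best.pt"  # hypothetical GCS path

if file.startswith("gs://"):
    file = "https://storage.googleapis.com/" + file[5:]  # convert gs:// to public HTTPS URL

print(file)  # https://storage.googleapis.com/my-bucket/weights/best.pt
```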
@@ -596,8 +647,7 @@ def check_file(file, suffix="", download=True, download_dir=".", hard=True):
 
 
 def check_yaml(file, suffix=(".yaml", ".yml"), hard=True):
-    """
-    Search/download YAML file (if necessary) and return path, checking suffix.
+    """Search/download YAML file (if necessary) and return path, checking suffix.
 
     Args:
         file (str | Path): File name or path.
@@ -611,8 +661,7 @@ def check_yaml(file, suffix=(".yaml", ".yml"), hard=True):
 
 
 def check_is_path_safe(basedir, path):
-    """
-    Check if the resolved path is under the intended directory to prevent path traversal.
+    """Check if the resolved path is under the intended directory to prevent path traversal.
 
     Args:
         basedir (Path | str): The intended directory.
@@ -629,8 +678,7 @@ def check_is_path_safe(basedir, path):
 
 @functools.lru_cache
 def check_imshow(warn=False):
-    """
-    Check if environment supports image displays.
+    """Check if environment supports image displays.
 
     Args:
         warn (bool): Whether to warn if environment doesn't support image displays.
@@ -654,8 +702,7 @@ def check_imshow(warn=False):
 
 
 def check_yolo(verbose=True, device=""):
-    """
-    Return a human-readable YOLO software and hardware summary.
+    """Return a human-readable YOLO software and hardware summary.
 
     Args:
         verbose (bool): Whether to print verbose information.
@@ -672,7 +719,7 @@ def check_yolo(verbose=True, device=""):
     # System info
     gib = 1 << 30  # bytes per GiB
     ram = psutil.virtual_memory().total
-    total,
+    total, _used, free = shutil.disk_usage("/")
     s = f"({os.cpu_count()} CPUs, {ram / gib:.1f} GB RAM, {(total - free) / gib:.1f}/{total / gib:.1f} GB disk)"
     try:
         from IPython import display
@@ -691,8 +738,7 @@ def check_yolo(verbose=True, device=""):
 
 
 def collect_system_info():
-    """
-    Collect and print relevant system information including OS, Python, RAM, CPU, and CUDA.
+    """Collect and print relevant system information including OS, Python, RAM, CPU, and CUDA.
 
     Returns:
         (dict): Dictionary containing system information.
@@ -705,7 +751,7 @@ def collect_system_info():
     gib = 1 << 30  # bytes per GiB
     cuda = torch.cuda.is_available()
     check_yolo()
-    total,
+    total, _used, free = shutil.disk_usage("/")
 
     info_dict = {
         "OS": platform.platform(),
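Both check_yolo and collect_system_info above now unpack shutil.disk_usage("/") explicitly as total, _used, free. A standard-library sketch of the summary string they build; the RAM value is a hypothetical constant standing in for psutil.virtual_memory().total:

```python
# Builds a system summary string the way check_yolo does above (stdlib only).
import os
import shutil

gib = 1 << 30  # bytes per GiB
ram = 16 * gib  # hypothetical RAM total; the real code reads psutil.virtual_memory().total
total, _used, free = shutil.disk_usage("/")
print(f"({os.cpu_count()} CPUs, {ram / gib:.1f} GB RAM, {(total - free) / gib:.1f}/{total / gib:.1f} GB disk)")
```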
@@ -752,8 +798,7 @@ def collect_system_info():
 
 
 def check_amp(model):
-    """
-    Check the PyTorch Automatic Mixed Precision (AMP) functionality of a YOLO model.
+    """Check the PyTorch Automatic Mixed Precision (AMP) functionality of a YOLO model.
 
     If the checks fail, it means there are anomalies with AMP on the system that may cause NaN losses or zero-mAP
     results, so AMP will be disabled during training.
@@ -767,7 +812,7 @@ def check_amp(model):
     Examples:
         >>> from ultralytics import YOLO
         >>> from ultralytics.utils.checks import check_amp
-        >>> model = YOLO("
+        >>> model = YOLO("yolo26n.pt").model.cuda()
        >>> check_amp(model)
     """
     from ultralytics.utils.torch_utils import autocast
@@ -806,14 +851,14 @@ def check_amp(model):
     try:
         from ultralytics import YOLO
 
-        assert amp_allclose(YOLO("
+        assert amp_allclose(YOLO("yolo26n.pt"), im)
         LOGGER.info(f"{prefix}checks passed ✅")
     except ConnectionError:
-        LOGGER.warning(f"{prefix}checks skipped. Offline and unable to download
+        LOGGER.warning(f"{prefix}checks skipped. Offline and unable to download YOLO26n for AMP checks. {warning_msg}")
     except (AttributeError, ModuleNotFoundError):
         LOGGER.warning(
             f"{prefix}checks skipped. "
-            f"Unable to load
+            f"Unable to load YOLO26n for AMP checks due to possible Ultralytics package modifications. {warning_msg}"
         )
     except AssertionError:
         LOGGER.error(
@@ -849,8 +894,7 @@ def check_multiple_install():
 
 
 def print_args(args: dict | None = None, show_file=True, show_func=False):
-    """
-    Print function arguments (optional args dict).
+    """Print function arguments (optional args dict).
 
     Args:
         args (dict, optional): Arguments to print.
@@ -876,8 +920,7 @@ def print_args(args: dict | None = None, show_file=True, show_func=False):
 
 
 def cuda_device_count() -> int:
-    """
-    Get the number of NVIDIA GPUs available in the environment.
+    """Get the number of NVIDIA GPUs available in the environment.
 
     Returns:
         (int): The number of NVIDIA GPUs available.
@@ -902,8 +945,7 @@ def cuda_device_count() -> int:
 
 
 def cuda_is_available() -> bool:
-    """
-    Check if CUDA is available in the environment.
+    """Check if CUDA is available in the environment.
 
     Returns:
         (bool): True if one or more NVIDIA GPUs are available, False otherwise.
@@ -912,8 +954,7 @@ def cuda_is_available() -> bool:
 
 
 def is_rockchip():
-    """
-    Check if the current environment is running on a Rockchip SoC.
+    """Check if the current environment is running on a Rockchip SoC.
 
     Returns:
         (bool): True if running on a Rockchip SoC, False otherwise.
@@ -923,7 +964,7 @@ def is_rockchip():
         with open("/proc/device-tree/compatible") as f:
             dev_str = f.read()
             *_, soc = dev_str.split(",")
-            if soc.replace("\x00", "") in RKNN_CHIPS:
+            if soc.replace("\x00", "").split("-", 1)[0] in RKNN_CHIPS:
                 return True
     except OSError:
         return False
@@ -932,8 +973,7 @@ def is_rockchip():
 
 
 def is_intel():
-    """
-    Check if the system has Intel hardware (CPU or GPU).
+    """Check if the system has Intel hardware (CPU or GPU).
 
     Returns:
         (bool): True if Intel hardware is detected, False otherwise.
@@ -953,8 +993,7 @@ def is_intel():
 
 
 def is_sudo_available() -> bool:
-    """
-    Check if the sudo command is available in the environment.
+    """Check if the sudo command is available in the environment.
 
     Returns:
         (bool): True if the sudo command is available, False otherwise.
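The is_rockchip change above keeps only the part of the device-tree SoC string before the first '-', so board-variant suffixes still match the chip table. An illustration with a hypothetical compatible string and a stand-in chip set:

```python
# Illustrates the SoC normalization from the is_rockchip hunk above.
RKNN_CHIPS = {"rk3588"}  # hypothetical stand-in for the real constant

dev_str = "rockchip,rk3588-evb\x00"  # hypothetical /proc/device-tree/compatible content
*_, soc = dev_str.split(",")
print(soc.replace("\x00", "").split("-", 1)[0] in RKNN_CHIPS)  # True; an exact match on "rk3588-evb" would fail
```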
@@ -971,8 +1010,11 @@ check_torchvision()  # check torch-torchvision compatibility
 
 # Define constants
 IS_PYTHON_3_8 = PYTHON_VERSION.startswith("3.8")
+IS_PYTHON_3_9 = PYTHON_VERSION.startswith("3.9")
+IS_PYTHON_3_10 = PYTHON_VERSION.startswith("3.10")
 IS_PYTHON_3_12 = PYTHON_VERSION.startswith("3.12")
 IS_PYTHON_3_13 = PYTHON_VERSION.startswith("3.13")
 
+IS_PYTHON_MINIMUM_3_9 = check_python("3.9", hard=False)
 IS_PYTHON_MINIMUM_3_10 = check_python("3.10", hard=False)
 IS_PYTHON_MINIMUM_3_12 = check_python("3.12", hard=False)
ultralytics/utils/cpu.py
CHANGED
@@ -10,8 +10,7 @@ from pathlib import Path
 
 
 class CPUInfo:
-    """
-    Provide cross-platform CPU brand and model information.
+    """Provide cross-platform CPU brand and model information.
 
     Query platform-specific sources to retrieve a human-readable CPU descriptor and normalize it for consistent
     presentation across macOS, Linux, and Windows. If platform-specific probing fails, generic platform identifiers are
@@ -71,13 +70,9 @@ class CPUInfo:
         """Normalize and prettify a raw CPU descriptor string."""
         s = re.sub(r"\s+", " ", s.strip())
         s = s.replace("(TM)", "").replace("(tm)", "").replace("(R)", "").replace("(r)", "").strip()
-
-        m = re.search(r"(Intel.*?i\d[\w-]*) CPU @ ([\d.]+GHz)", s, re.I)
-        if m:
+        if m := re.search(r"(Intel.*?i\d[\w-]*) CPU @ ([\d.]+GHz)", s, re.I):
             return f"{m.group(1)} {m.group(2)}"
-
-        m = re.search(r"(AMD.*?Ryzen.*?[\w-]*) CPU @ ([\d.]+GHz)", s, re.I)
-        if m:
+        if m := re.search(r"(AMD.*?Ryzen.*?[\w-]*) CPU @ ([\d.]+GHz)", s, re.I):
             return f"{m.group(1)} {m.group(2)}"
         return s
 
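The cpu.py refactor above folds the separate re.search / if m: pairs into assignment expressions. A standalone illustration with a made-up descriptor string:

```python
# Same pattern as the cpu.py hunk above: match and test in one assignment expression.
import re

s = "Intel Core i7-10700K CPU @ 3.80GHz"  # hypothetical raw descriptor
if m := re.search(r"(Intel.*?i\d[\w-]*) CPU @ ([\d.]+GHz)", s, re.I):
    print(f"{m.group(1)} {m.group(2)}")  # Intel Core i7-10700K 3.80GHz
```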
ultralytics/utils/dist.py
CHANGED
@@ -10,8 +10,7 @@ from .torch_utils import TORCH_1_9
 
 
 def find_free_network_port() -> int:
-    """
-    Find a free port on localhost.
+    """Find a free port on localhost.
 
     It is useful in single-node training when we don't want to connect to a real main node but have to set the
     `MASTER_PORT` environment variable.
@@ -27,11 +26,10 @@ def find_free_network_port() -> int:
 
 
 def generate_ddp_file(trainer):
-    """
-    Generate a DDP (Distributed Data Parallel) file for multi-GPU training.
+    """Generate a DDP (Distributed Data Parallel) file for multi-GPU training.
 
-    This function creates a temporary Python file that enables distributed training across multiple GPUs.
-
+    This function creates a temporary Python file that enables distributed training across multiple GPUs. The file
+    contains the necessary configuration to initialize the trainer in a distributed environment.
 
     Args:
         trainer (ultralytics.engine.trainer.BaseTrainer): The trainer containing training configuration and arguments.
@@ -51,6 +49,7 @@ def generate_ddp_file(trainer):
 
     content = f"""
 # Ultralytics Multi-GPU training temp file (should be automatically deleted after use)
+from pathlib import Path, PosixPath  # For model arguments stored as Path instead of str
 overrides = {vars(trainer.args)}
 
 if __name__ == "__main__":
@@ -77,8 +76,7 @@ if __name__ == "__main__":
 
 
 def generate_ddp_command(trainer):
-    """
-    Generate command for distributed training.
+    """Generate command for distributed training.
 
     Args:
         trainer (ultralytics.engine.trainer.BaseTrainer): The trainer containing configuration for distributed training.
@@ -108,11 +106,10 @@ def generate_ddp_command(trainer):
 
 
 def ddp_cleanup(trainer, file):
-    """
-    Delete temporary file if created during distributed data parallel (DDP) training.
+    """Delete temporary file if created during distributed data parallel (DDP) training.
 
-    This function checks if the provided file contains the trainer's ID in its name, indicating it was created
-
+    This function checks if the provided file contains the trainer's ID in its name, indicating it was created as a
+    temporary file for DDP training, and deletes it if so.
 
     Args:
         trainer (ultralytics.engine.trainer.BaseTrainer): The trainer used for distributed training.