dgenerate-ultralytics-headless 8.3.189__py3-none-any.whl → 8.3.191__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dgenerate_ultralytics_headless-8.3.189.dist-info → dgenerate_ultralytics_headless-8.3.191.dist-info}/METADATA +1 -1
- {dgenerate_ultralytics_headless-8.3.189.dist-info → dgenerate_ultralytics_headless-8.3.191.dist-info}/RECORD +111 -109
- tests/test_cuda.py +6 -5
- tests/test_exports.py +1 -6
- tests/test_python.py +1 -4
- tests/test_solutions.py +1 -1
- ultralytics/__init__.py +1 -1
- ultralytics/cfg/__init__.py +16 -14
- ultralytics/cfg/datasets/VisDrone.yaml +4 -4
- ultralytics/data/annotator.py +6 -6
- ultralytics/data/augment.py +53 -51
- ultralytics/data/base.py +15 -13
- ultralytics/data/build.py +7 -4
- ultralytics/data/converter.py +9 -10
- ultralytics/data/dataset.py +24 -22
- ultralytics/data/loaders.py +13 -11
- ultralytics/data/split.py +4 -3
- ultralytics/data/split_dota.py +14 -12
- ultralytics/data/utils.py +31 -25
- ultralytics/engine/exporter.py +7 -4
- ultralytics/engine/model.py +16 -14
- ultralytics/engine/predictor.py +9 -7
- ultralytics/engine/results.py +59 -57
- ultralytics/engine/trainer.py +7 -0
- ultralytics/engine/tuner.py +4 -3
- ultralytics/engine/validator.py +3 -1
- ultralytics/hub/__init__.py +6 -2
- ultralytics/hub/auth.py +2 -2
- ultralytics/hub/google/__init__.py +9 -8
- ultralytics/hub/session.py +11 -11
- ultralytics/hub/utils.py +8 -9
- ultralytics/models/fastsam/model.py +8 -6
- ultralytics/models/nas/model.py +5 -3
- ultralytics/models/rtdetr/train.py +4 -3
- ultralytics/models/rtdetr/val.py +6 -4
- ultralytics/models/sam/amg.py +13 -10
- ultralytics/models/sam/model.py +3 -2
- ultralytics/models/sam/modules/blocks.py +21 -21
- ultralytics/models/sam/modules/decoders.py +11 -11
- ultralytics/models/sam/modules/encoders.py +25 -25
- ultralytics/models/sam/modules/memory_attention.py +9 -8
- ultralytics/models/sam/modules/sam.py +8 -10
- ultralytics/models/sam/modules/tiny_encoder.py +21 -20
- ultralytics/models/sam/modules/transformer.py +6 -5
- ultralytics/models/sam/modules/utils.py +7 -5
- ultralytics/models/sam/predict.py +32 -31
- ultralytics/models/utils/loss.py +29 -27
- ultralytics/models/utils/ops.py +10 -8
- ultralytics/models/yolo/classify/train.py +7 -5
- ultralytics/models/yolo/classify/val.py +10 -8
- ultralytics/models/yolo/detect/predict.py +3 -3
- ultralytics/models/yolo/detect/train.py +8 -6
- ultralytics/models/yolo/detect/val.py +23 -21
- ultralytics/models/yolo/model.py +14 -14
- ultralytics/models/yolo/obb/train.py +5 -3
- ultralytics/models/yolo/obb/val.py +13 -10
- ultralytics/models/yolo/pose/train.py +7 -5
- ultralytics/models/yolo/pose/val.py +11 -9
- ultralytics/models/yolo/segment/train.py +4 -5
- ultralytics/models/yolo/segment/val.py +12 -10
- ultralytics/models/yolo/world/train.py +9 -7
- ultralytics/models/yolo/yoloe/train.py +7 -6
- ultralytics/models/yolo/yoloe/val.py +10 -8
- ultralytics/nn/autobackend.py +40 -52
- ultralytics/nn/modules/__init__.py +3 -3
- ultralytics/nn/modules/block.py +12 -12
- ultralytics/nn/modules/conv.py +4 -3
- ultralytics/nn/modules/head.py +46 -38
- ultralytics/nn/modules/transformer.py +22 -21
- ultralytics/nn/tasks.py +2 -2
- ultralytics/nn/text_model.py +6 -5
- ultralytics/solutions/analytics.py +7 -5
- ultralytics/solutions/config.py +12 -10
- ultralytics/solutions/distance_calculation.py +3 -3
- ultralytics/solutions/heatmap.py +4 -2
- ultralytics/solutions/object_counter.py +5 -3
- ultralytics/solutions/parking_management.py +4 -2
- ultralytics/solutions/region_counter.py +7 -5
- ultralytics/solutions/similarity_search.py +5 -3
- ultralytics/solutions/solutions.py +38 -36
- ultralytics/solutions/streamlit_inference.py +8 -7
- ultralytics/trackers/bot_sort.py +11 -9
- ultralytics/trackers/byte_tracker.py +17 -15
- ultralytics/trackers/utils/gmc.py +4 -3
- ultralytics/utils/__init__.py +27 -77
- ultralytics/utils/autobatch.py +3 -2
- ultralytics/utils/autodevice.py +10 -10
- ultralytics/utils/benchmarks.py +11 -10
- ultralytics/utils/callbacks/comet.py +9 -9
- ultralytics/utils/callbacks/platform.py +2 -1
- ultralytics/utils/checks.py +20 -29
- ultralytics/utils/downloads.py +2 -2
- ultralytics/utils/export.py +12 -11
- ultralytics/utils/files.py +8 -7
- ultralytics/utils/git.py +139 -0
- ultralytics/utils/instance.py +8 -7
- ultralytics/utils/logger.py +7 -6
- ultralytics/utils/loss.py +15 -13
- ultralytics/utils/metrics.py +62 -62
- ultralytics/utils/nms.py +346 -0
- ultralytics/utils/ops.py +83 -251
- ultralytics/utils/patches.py +6 -4
- ultralytics/utils/plotting.py +18 -16
- ultralytics/utils/tal.py +1 -1
- ultralytics/utils/torch_utils.py +4 -2
- ultralytics/utils/tqdm.py +47 -33
- ultralytics/utils/triton.py +3 -2
- {dgenerate_ultralytics_headless-8.3.189.dist-info → dgenerate_ultralytics_headless-8.3.191.dist-info}/WHEEL +0 -0
- {dgenerate_ultralytics_headless-8.3.189.dist-info → dgenerate_ultralytics_headless-8.3.191.dist-info}/entry_points.txt +0 -0
- {dgenerate_ultralytics_headless-8.3.189.dist-info → dgenerate_ultralytics_headless-8.3.191.dist-info}/licenses/LICENSE +0 -0
- {dgenerate_ultralytics_headless-8.3.189.dist-info → dgenerate_ultralytics_headless-8.3.191.dist-info}/top_level.txt +0 -0
ultralytics/utils/__init__.py
CHANGED
@@ -1,5 +1,7 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

+from __future__ import annotations
+
 import contextlib
 import importlib.metadata
 import inspect
@@ -8,7 +10,7 @@ import logging
 import os
 import platform
 import re
-import subprocess
+import socket
 import sys
 import threading
 import time
@@ -16,7 +18,6 @@ from functools import lru_cache
 from pathlib import Path
 from threading import Lock
 from types import SimpleNamespace
-from typing import Union
 from urllib.parse import unquote

 import cv2
@@ -24,6 +25,7 @@ import numpy as np
 import torch

 from ultralytics import __version__
+from ultralytics.utils.git import GitRepo
 from ultralytics.utils.patches import imread, imshow, imwrite, torch_save  # for patches
 from ultralytics.utils.tqdm import TQDM  # noqa

@@ -44,6 +46,7 @@ VERBOSE = str(os.getenv("YOLO_VERBOSE", True)).lower() == "true"  # global verbo
 LOGGING_NAME = "ultralytics"
 MACOS, LINUX, WINDOWS = (platform.system() == x for x in ["Darwin", "Linux", "Windows"])  # environment booleans
 MACOS_VERSION = platform.mac_ver()[0] if MACOS else None
+NOT_MACOS14 = not (MACOS and MACOS_VERSION.startswith("14."))
 ARM64 = platform.machine() in {"arm64", "aarch64"}  # ARM64 booleans
 PYTHON_VERSION = platform.python_version()
 TORCH_VERSION = torch.__version__
@@ -189,9 +192,10 @@ class DataExportMixin:
         def _to_str_simple(v):
             if v is None:
                 return ""
-            if isinstance(v, (dict, list, tuple, set)):
+            elif isinstance(v, (dict, list, tuple, set)):
                 return repr(v)
-            return str(v)
+            else:
+                return str(v)

         df_str = df.select(
             [pl.col(c).map_elements(_to_str_simple, return_dtype=pl.String).alias(c) for c in df.columns]
@@ -413,10 +417,10 @@ def set_logging(name="LOGGING_NAME", verbose=True):
             """Format log records with prefixes based on level."""
             # Apply prefixes based on log level
             if record.levelno == logging.WARNING:
-                prefix = "WARNING ⚠️" if not WINDOWS else "WARNING"
+                prefix = "WARNING" if WINDOWS else "WARNING ⚠️"
                 record.msg = f"{prefix} {record.msg}"
             elif record.levelno == logging.ERROR:
-                prefix = "ERROR ❌" if not WINDOWS else "ERROR"
+                prefix = "ERROR" if WINDOWS else "ERROR ❌"
                 record.msg = f"{prefix} {record.msg}"

             # Handle emojis in message based on platform
@@ -427,7 +431,7 @@ def set_logging(name="LOGGING_NAME", verbose=True):

     # Handle Windows UTF-8 encoding issues
     if WINDOWS and hasattr(sys.stdout, "encoding") and sys.stdout.encoding != "utf-8":
-        try:
+        with contextlib.suppress(Exception):
             # Attempt to reconfigure stdout to use UTF-8 encoding if possible
             if hasattr(sys.stdout, "reconfigure"):
                 sys.stdout.reconfigure(encoding="utf-8")
@@ -436,8 +440,6 @@ def set_logging(name="LOGGING_NAME", verbose=True):
                 import io

                 sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8")
-        except Exception:
-            pass

     # Create and configure the StreamHandler with the appropriate formatter and level
     stream_handler = logging.StreamHandler(sys.stdout)
@@ -752,20 +754,21 @@ def is_jetson(jetpack=None) -> bool:

 def is_online() -> bool:
     """
-    Check internet connectivity by attempting to connect to a known online host.
+    Fast online check using DNS (v4/v6) resolution (Cloudflare + Google).

     Returns:
         (bool): True if connection is successful, False otherwise.
     """
-    try:
-        assert str(os.getenv("YOLO_OFFLINE", "")).lower() != "true"
-        import socket
+    if str(os.getenv("YOLO_OFFLINE", "")).lower() == "true":
+        return False

-        for dns in ("1.1.1.1", "8.8.8.8"):  # check Cloudflare and Google DNS
-            socket.create_connection(address=(dns, 80), timeout=2.0).close()
+    for host in ("one.one.one.one", "dns.google"):
+        try:
+            socket.getaddrinfo(host, 0, socket.AF_UNSPEC, 0, 0, socket.AI_ADDRCONFIG)
             return True
-    except Exception:
-        return False
+        except OSError:
+            continue
+    return False


 def is_pip_package(filepath: str = __name__) -> bool:
@@ -787,7 +790,7 @@ def is_pip_package(filepath: str = __name__) -> bool:
     return spec is not None and spec.origin is not None


-def is_dir_writeable(dir_path: Union[str, Path]) -> bool:
+def is_dir_writeable(dir_path: str | Path) -> bool:
     """
     Check if a directory is writeable.

@@ -820,58 +823,6 @@ def is_github_action_running() -> bool:
     return "GITHUB_ACTIONS" in os.environ and "GITHUB_WORKFLOW" in os.environ and "RUNNER_OS" in os.environ


-def get_git_dir():
-    """
-    Determine whether the current file is part of a git repository and if so, return the repository root directory.
-
-    Returns:
-        (Path | None): Git root directory if found or None if not found.
-    """
-    for d in Path(__file__).parents:
-        if (d / ".git").is_dir():
-            return d
-
-
-def is_git_dir():
-    """
-    Determine whether the current file is part of a git repository.
-
-    Returns:
-        (bool): True if current file is part of a git repository.
-    """
-    return GIT_DIR is not None
-
-
-def get_git_origin_url():
-    """
-    Retrieve the origin URL of a git repository.
-
-    Returns:
-        (str | None): The origin URL of the git repository or None if not git directory.
-    """
-    if IS_GIT_DIR:
-        try:
-            origin = subprocess.check_output(["git", "config", "--get", "remote.origin.url"])
-            return origin.decode().strip()
-        except subprocess.CalledProcessError:
-            return None
-
-
-def get_git_branch():
-    """
-    Return the current git branch name. If not in a git repository, return None.
-
-    Returns:
-        (str | None): The current git branch name or None if not a git directory.
-    """
-    if IS_GIT_DIR:
-        try:
-            origin = subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"])
-            return origin.decode().strip()
-        except subprocess.CalledProcessError:
-            return None
-
-
 def get_default_args(func):
     """
     Return a dictionary of default arguments for a function.
@@ -944,8 +895,7 @@ IS_JETSON = is_jetson()
 IS_JUPYTER = is_jupyter()
 IS_PIP_PACKAGE = is_pip_package()
 IS_RASPBERRYPI = is_raspberrypi()
-GIT_DIR = get_git_dir()
-IS_GIT_DIR = is_git_dir()
+GIT = GitRepo()
 USER_CONFIG_DIR = Path(os.getenv("YOLO_CONFIG_DIR") or get_user_config_dir())  # Ultralytics settings dir
 SETTINGS_FILE = USER_CONFIG_DIR / "settings.json"
@@ -1168,7 +1118,7 @@ def set_sentry():
         or TESTS_RUNNING
         or not ONLINE
         or not IS_PIP_PACKAGE
-        or IS_GIT_DIR
+        or GIT.is_repo
     ):
         return
     # If sentry_sdk package is not installed then return and do not use Sentry
@@ -1196,7 +1146,7 @@ def set_sentry():
         event["tags"] = {
             "sys_argv": ARGV[0],
             "sys_argv_name": Path(ARGV[0]).name,
-            "install": "git" if IS_GIT_DIR else "pip" if IS_PIP_PACKAGE else "other",
+            "install": "git" if GIT.is_repo else "pip" if IS_PIP_PACKAGE else "other",
             "os": ENVIRONMENT,
         }
         return event
@@ -1243,7 +1193,7 @@ class JSONDict(dict):
         >>> json_dict.clear()
     """

-    def __init__(self, file_path: Union[str, Path] = "data.json"):
+    def __init__(self, file_path: str | Path = "data.json"):
         """Initialize a JSONDict object with a specified file path for JSON persistence."""
         super().__init__()
         self.file_path = Path(file_path)
@@ -1341,8 +1291,8 @@ class SettingsManager(JSONDict):

         from ultralytics.utils.torch_utils import torch_distributed_zero_first

-        root = GIT_DIR or Path()
-        datasets_root = (root.parent if GIT_DIR and is_dir_writeable(root.parent) else root).resolve()
+        root = GIT.root or Path()
+        datasets_root = (root.parent if GIT.root and is_dir_writeable(root.parent) else root).resolve()

         self.file = Path(file)
         self.version = version
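The removed module-level git helpers (`get_git_dir`, `is_git_dir`, `get_git_origin_url`, `get_git_branch`) and the `GIT_DIR`/`IS_GIT_DIR` constants are consolidated behind the new `ultralytics/utils/git.py` (listed above with +139 lines) and a single `GIT = GitRepo()` instance. The actual `GitRepo` implementation is not shown in this diff; a minimal sketch of the interface the call sites above rely on (`.root` and `.is_repo`) might look like the following — any method beyond those two attributes is an assumption, not the package's code:

```python
# Hypothetical sketch only: mirrors the removed helpers behind one object.
from __future__ import annotations

import subprocess
from pathlib import Path


class GitRepo:
    """Discover the enclosing git checkout, if any, for the current file."""

    def __init__(self, path: Path = Path(__file__)):
        # Walk parent directories looking for a .git folder, like the removed get_git_dir()
        self.root = next((d for d in path.resolve().parents if (d / ".git").is_dir()), None)

    @property
    def is_repo(self) -> bool:
        """True if this file lives inside a git checkout (replaces IS_GIT_DIR)."""
        return self.root is not None

    def _run(self, *args: str) -> str | None:
        """Run a git command in the repo root; return stripped stdout or None on failure."""
        if not self.is_repo:
            return None
        try:
            return subprocess.check_output(["git", "-C", str(self.root), *args], text=True).strip()
        except (subprocess.CalledProcessError, FileNotFoundError):
            return None

    @property
    def origin(self) -> str | None:
        """Remote origin URL (replaces get_git_origin_url)."""
        return self._run("config", "--get", "remote.origin.url")

    @property
    def branch(self) -> str | None:
        """Current branch name (replaces get_git_branch)."""
        return self._run("rev-parse", "--abbrev-ref", "HEAD")
```

Call sites then read `GIT.is_repo` instead of `IS_GIT_DIR` and `GIT.root or Path()` instead of `GIT_DIR or Path()`, as the `set_sentry` and `SettingsManager` hunks above show.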
ultralytics/utils/autobatch.py
CHANGED
@@ -1,9 +1,10 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
 """Functions for estimating the best YOLO batch size to use a fraction of the available CUDA memory in PyTorch."""

+from __future__ import annotations
+
 import os
 from copy import deepcopy
-from typing import Union

 import numpy as np
 import torch
@@ -16,7 +17,7 @@ def check_train_batch_size(
     model: torch.nn.Module,
     imgsz: int = 640,
     amp: bool = True,
-    batch: Union[int, float] = -1,
+    batch: int | float = -1,
     max_num_obj: int = 1,
 ) -> int:
     """
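This file shows the pattern repeated across most modules in this release: drop the `typing.Union`/`Optional`/`List`/`Tuple` imports, add `from __future__ import annotations`, and write annotations with PEP 604 unions (`int | float`) and PEP 585 built-in generics (`list[str]`). The future import keeps annotations as unevaluated strings, so the new syntax is safe on Python 3.8/3.9. A standalone illustration (the function below is hypothetical, not from the package):

```python
from __future__ import annotations  # keep annotations as lazy strings (PEP 563)


def resolve_batch(batch: int | float = -1, fallback: list[int] | None = None) -> int:
    """PEP 604/585 syntax works here on Python 3.8/3.9 because annotations are not evaluated.

    Only annotations are exempt: using `int | float` at runtime (e.g. in isinstance checks)
    would still require Python 3.10+.
    """
    return int(batch) if batch > 0 else (fallback or [16])[0]


print(resolve_batch(32))  # 32
print(resolve_batch(-1))  # 16
```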
ultralytics/utils/autodevice.py
CHANGED
@@ -1,6 +1,8 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

-from typing import Any, Dict, List, Optional
+from __future__ import annotations
+
+from typing import Any

 from ultralytics.utils import LOGGER
 from ultralytics.utils.checks import check_requirements
@@ -44,9 +46,9 @@ class GPUInfo:

     def __init__(self):
         """Initialize GPUInfo, attempting to import and initialize pynvml."""
-        self.pynvml: Optional[Any] = None
+        self.pynvml: Any | None = None
         self.nvml_available: bool = False
-        self.gpu_stats: List[Dict[str, Any]] = []
+        self.gpu_stats: list[dict[str, Any]] = []

         try:
             check_requirements("nvidia-ml-py>=12.0.0")
@@ -78,13 +80,12 @@ class GPUInfo:

         try:
             device_count = self.pynvml.nvmlDeviceGetCount()
-            for i in range(device_count):
-                self.gpu_stats.append(self._get_device_stats(i))
+            self.gpu_stats.extend(self._get_device_stats(i) for i in range(device_count))
         except Exception as e:
             LOGGER.warning(f"Error during device query: {e}")
             self.gpu_stats = []

-    def _get_device_stats(self, index: int) -> Dict[str, Any]:
+    def _get_device_stats(self, index: int) -> dict[str, Any]:
         """Get stats for a single GPU device."""
         handle = self.pynvml.nvmlDeviceGetHandleByIndex(index)
         memory = self.pynvml.nvmlDeviceGetMemoryInfo(handle)
@@ -135,7 +136,7 @@ class GPUInfo:

     def select_idle_gpu(
         self, count: int = 1, min_memory_fraction: float = 0, min_util_fraction: float = 0
-    ) -> List[int]:
+    ) -> list[int]:
         """
         Select the most idle GPUs based on utilization and free memory.

@@ -195,12 +196,11 @@ if __name__ == "__main__":
     gpu_info = GPUInfo()
     gpu_info.print_status()

-    selected = gpu_info.select_idle_gpu(
+    if selected := gpu_info.select_idle_gpu(
         count=num_gpus_to_select,
         min_memory_fraction=required_free_mem_fraction,
         min_util_fraction=required_free_util_fraction,
-    )
-    if selected:
+    ):
         print(f"\n==> Using selected GPU indices: {selected}")
         devices = [f"cuda:{idx}" for idx in selected]
         print(f"    Target devices: {devices}")
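The `__main__` block above folds the `selected = ...; if selected:` pair into a single assignment expression (the walrus operator, available since Python 3.8). A self-contained before/after sketch with a stand-in function:

```python
from __future__ import annotations


def idle_gpus() -> list[int]:
    """Stand-in for GPUInfo.select_idle_gpu(): returns selected GPU indices or an empty list."""
    return [0, 1]


# 8.3.189 style: assign, then test
selected = idle_gpus()
if selected:
    print(f"Using GPUs: {selected}")

# 8.3.191 style: the assignment expression binds and tests in one statement
if selected := idle_gpus():
    print(f"Using GPUs: {selected}")
```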
ultralytics/utils/benchmarks.py
CHANGED
@@ -27,6 +27,8 @@ IMX | `imx` | yolo11n_imx_model/
 RKNN | `rknn` | yolo11n_rknn_model/
 """

+from __future__ import annotations
+
 import glob
 import os
 import platform
@@ -34,7 +36,6 @@ import re
 import shutil
 import time
 from pathlib import Path
-from typing import List, Optional, Tuple, Union

 import numpy as np
 import torch.cuda
@@ -400,14 +401,14 @@ class ProfileModels:

     def __init__(
         self,
-        paths: List[str],
+        paths: list[str],
         num_timed_runs: int = 100,
         num_warmup_runs: int = 10,
         min_time: float = 60,
         imgsz: int = 640,
         half: bool = True,
         trt: bool = True,
-        device: Optional[Union[torch.device, str]] = None,
+        device: torch.device | str | None = None,
     ):
         """
         Initialize the ProfileModels class for profiling models.
@@ -650,9 +651,9 @@ class ProfileModels:
     def generate_table_row(
         self,
         model_name: str,
-        t_onnx: Tuple[float, float],
-        t_engine: Tuple[float, float],
-        model_info: Tuple[float, float, float, float],
+        t_onnx: tuple[float, float],
+        t_engine: tuple[float, float],
+        model_info: tuple[float, float, float, float],
     ):
         """
         Generate a table row string with model performance metrics.
@@ -675,9 +676,9 @@ class ProfileModels:
     @staticmethod
     def generate_results_dict(
         model_name: str,
-        t_onnx: Tuple[float, float],
-        t_engine: Tuple[float, float],
-        model_info: Tuple[float, float, float, float],
+        t_onnx: tuple[float, float],
+        t_engine: tuple[float, float],
+        model_info: tuple[float, float, float, float],
     ):
         """
         Generate a dictionary of profiling results.
@@ -701,7 +702,7 @@ class ProfileModels:
         }

     @staticmethod
-    def print_table(table_rows: List[str]):
+    def print_table(table_rows: list[str]):
         """
         Print a formatted table of model profiling results.

ultralytics/utils/callbacks/comet.py
CHANGED
@@ -1,8 +1,10 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

+from __future__ import annotations
+
 from collections.abc import Callable
 from types import SimpleNamespace
-from typing import Any, List, Optional
+from typing import Any

 import cv2
 import numpy as np
@@ -147,7 +149,7 @@ def _fetch_trainer_metadata(trainer) -> dict:

 def _scale_bounding_box_to_original_image_shape(
     box, resized_image_shape, original_image_shape, ratio_pad
-) -> List[float]:
+) -> list[float]:
     """
     Scale bounding box from resized image coordinates to original image coordinates.

@@ -178,7 +180,7 @@ def _scale_bounding_box_to_original_image_shape(
     return box


-def _format_ground_truth_annotations_for_detection(img_idx, image_path, batch, class_name_map=None) -> Optional[dict]:
+def _format_ground_truth_annotations_for_detection(img_idx, image_path, batch, class_name_map=None) -> dict | None:
     """
     Format ground truth annotations for object detection.

@@ -233,7 +235,7 @@ def _format_ground_truth_annotations_for_detection(img_idx, image_path, batch, c
     return {"name": "ground_truth", "data": data}


-def _format_prediction_annotations(image_path, metadata, class_label_map=None, class_map=None) -> Optional[dict]:
+def _format_prediction_annotations(image_path, metadata, class_label_map=None, class_map=None) -> dict | None:
     """
     Format YOLO predictions for object detection visualization.

@@ -286,7 +288,7 @@ def _format_prediction_annotations(image_path, metadata, class_label_map=None, c
     return {"name": "prediction", "data": data}


-def _extract_segmentation_annotation(segmentation_raw: str, decode: Callable) -> Optional[List[List[Any]]]:
+def _extract_segmentation_annotation(segmentation_raw: str, decode: Callable) -> list[list[Any]] | None:
     """
     Extract segmentation annotation from compressed segmentations as list of polygons.

@@ -307,9 +309,7 @@ def _extract_segmentation_annotation(segmentation_raw: str, decode: Callable) ->
     return None


-def _fetch_annotations(
-    img_idx, image_path, batch, prediction_metadata_map, class_label_map, class_map
-) -> Optional[List]:
+def _fetch_annotations(img_idx, image_path, batch, prediction_metadata_map, class_label_map, class_map) -> list | None:
     """
     Join the ground truth and prediction annotations if they exist.

@@ -356,7 +356,7 @@ def _log_confusion_matrix(experiment, trainer, curr_step, curr_epoch) -> None:
     )


-def _log_images(experiment, image_paths, curr_step: Optional[int], annotations=None) -> None:
+def _log_images(experiment, image_paths, curr_step: int | None, annotations=None) -> None:
     """
     Log images to the experiment with optional annotations.

ultralytics/utils/callbacks/platform.py
CHANGED
@@ -1,12 +1,13 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

 from ultralytics.utils import RANK, SETTINGS
-from ultralytics.utils.logger import DEFAULT_LOG_PATH, ConsoleLogger, SystemLogger


 def on_pretrain_routine_start(trainer):
     """Initialize and start console logging immediately at the very beginning."""
     if RANK in {-1, 0}:
+        from ultralytics.utils.logger import DEFAULT_LOG_PATH, ConsoleLogger, SystemLogger
+
         trainer.system_logger = SystemLogger()
         trainer.console_logger = ConsoleLogger(DEFAULT_LOG_PATH)
         trainer.console_logger.start_capture()
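This change moves the logger import from module scope into the callback body, so the logging machinery is only imported on the rank that actually uses it — the same scoping idea behind the `# scoped as slow import` comments added to checks.py and downloads.py below. A minimal illustration of the deferred-import pattern (stand-in names, not the package's code):

```python
RANK = -1  # stand-in for ultralytics.utils.RANK in this sketch


def on_pretrain_routine_start(trainer=None):
    """Only import heavy or optional dependencies when this callback actually runs."""
    if RANK in {-1, 0}:
        # Deferred import: a module-level import would pay the import cost (and risk an
        # ImportError for an optional dependency) in every process, even non-logging ranks.
        import json  # placeholder for a heavier logging dependency

        print(json.dumps({"event": "pretrain_routine_start"}))


on_pretrain_routine_start()
```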
ultralytics/utils/checks.py
CHANGED
@@ -1,5 +1,7 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

+from __future__ import annotations
+
 import functools
 import glob
 import inspect
@@ -13,7 +15,6 @@ import time
 from importlib import metadata
 from pathlib import Path
 from types import SimpleNamespace
-from typing import Optional

 import cv2
 import numpy as np
@@ -23,8 +24,8 @@ from ultralytics.utils import (
     ARM64,
     ASSETS,
     AUTOINSTALL,
+    GIT,
     IS_COLAB,
-    IS_GIT_DIR,
     IS_JETSON,
     IS_KAGGLE,
     IS_PIP_PACKAGE,
@@ -274,7 +275,7 @@ def check_latest_pypi_version(package_name="ultralytics"):
     Returns:
         (str): The latest version of the package.
     """
-    import requests  # slow import
+    import requests  # scoped as slow import

     try:
         requests.packages.urllib3.disable_warnings()  # Disable the InsecureRequestWarning
@@ -401,16 +402,22 @@ def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=()
     def attempt_install(packages, commands, use_uv):
         """Attempt package installation with uv if available, falling back to pip."""
         if use_uv:
-            base = f"uv pip install --no-cache-dir {packages} {commands} --index-strategy=unsafe-best-match --break-system-packages --prerelease=allow"
+            base = (
+                f"uv pip install --no-cache-dir {packages} {commands} "
+                f"--index-strategy=unsafe-best-match --break-system-packages --prerelease=allow"
+            )
             try:
-                return subprocess.check_output(base, shell=True, stderr=subprocess.PIPE)
+                return subprocess.check_output(base, shell=True, stderr=subprocess.PIPE, text=True)
             except subprocess.CalledProcessError as e:
-                if e.stderr and "No virtual environment found" in e.stderr.decode():
+                if e.stderr and "No virtual environment found" in e.stderr:
                     return subprocess.check_output(
-                        base.replace("uv pip install", "uv pip install --system"), shell=True, stderr=subprocess.PIPE
-                    )
+                        base.replace("uv pip install", "uv pip install --system"),
+                        shell=True,
+                        stderr=subprocess.PIPE,
+                        text=True,
+                    )
                 raise
-        return subprocess.check_output(f"pip install --no-cache-dir {packages} {commands}", shell=True)
+        return subprocess.check_output(f"pip install --no-cache-dir {packages} {commands}", shell=True, text=True)

     s = " ".join(f'"{x}"' for x in pkgs)  # console string
     if s:
@@ -637,7 +644,7 @@ def check_yolo(verbose=True, device=""):
         verbose (bool): Whether to print verbose information.
         device (str | torch.device): Device to use for YOLO.
     """
-    import psutil
+    import psutil  # scoped as slow import

     from ultralytics.utils.torch_utils import select_device

@@ -670,7 +677,7 @@ def collect_system_info():
     Returns:
         (dict): Dictionary containing system information.
     """
-    import psutil
+    import psutil  # scoped as slow import

     from ultralytics.utils import ENVIRONMENT  # scope to avoid circular import
     from ultralytics.utils.torch_utils import get_cpu_info, get_gpu_info
@@ -684,7 +691,7 @@ def collect_system_info():
         "OS": platform.platform(),
         "Environment": ENVIRONMENT,
         "Python": PYTHON_VERSION,
-        "Install": "git" if IS_GIT_DIR else "pip" if IS_PIP_PACKAGE else "other",
+        "Install": "git" if GIT.is_repo else "pip" if IS_PIP_PACKAGE else "other",
         "Path": str(ROOT),
         "RAM": f"{psutil.virtual_memory().total / gib:.2f} GB",
         "Disk": f"{(total - free) / gib:.1f}/{total / gib:.1f} GB",
@@ -797,23 +804,7 @@ def check_amp(model):
     return True


-def git_describe(path=ROOT):
-    """
-    Return human-readable git description, i.e. v5.0-5-g3e25f1e https://git-scm.com/docs/git-describe.
-
-    Args:
-        path (Path): Path to git repository.
-
-    Returns:
-        (str): Human-readable git description.
-    """
-    try:
-        return subprocess.check_output(f"git -C {path} describe --tags --long --always", shell=True).decode()[:-1]
-    except Exception:
-        return ""
-
-
-def print_args(args: Optional[dict] = None, show_file=True, show_func=False):
+def print_args(args: dict | None = None, show_file=True, show_func=False):
     """
     Print function arguments (optional args dict).

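Passing `text=True` to every `subprocess.check_output` call in `attempt_install` makes captured stdout/stderr `str` rather than `bytes`, which is why the fallback test can now read `"No virtual environment found" in e.stderr` without a `.decode()`. A standalone sketch of the same shape (the command and the `--system` retry flag below are illustrative, not the package's real install strings):

```python
import subprocess


def run_with_fallback(cmd: str) -> str:
    """Run a shell command; retry with a hypothetical fallback flag on one known error."""
    try:
        # text=True returns str instead of bytes, so substring checks need no .decode()
        return subprocess.check_output(cmd, shell=True, stderr=subprocess.PIPE, text=True)
    except subprocess.CalledProcessError as e:
        if e.stderr and "No virtual environment found" in e.stderr:
            return subprocess.check_output(f"{cmd} --system", shell=True, stderr=subprocess.PIPE, text=True)
        raise


print(run_with_fallback("echo ok"))
```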
ultralytics/utils/downloads.py
CHANGED
@@ -252,7 +252,7 @@ def get_google_drive_file_info(link: str) -> tuple[str, str | None]:
         >>> link = "https://drive.google.com/file/d/1cqT-cJgANNrhIHCrEufUYhQ4RqiWG_lJ/view?usp=drive_link"
         >>> url, filename = get_google_drive_file_info(link)
     """
-    import requests  # slow import
+    import requests  # scoped as slow import

     file_id = link.split("/d/")[1].split("/view", 1)[0]
     drive_url = f"https://drive.google.com/uc?export=download&id={file_id}"
@@ -416,7 +416,7 @@ def get_github_assets(
     Examples:
         >>> tag, assets = get_github_assets(repo="ultralytics/assets", version="latest")
     """
-    import requests  # slow import
+    import requests  # scoped as slow import

     if version != "latest":
         version = f"tags/{version}"  # i.e. tags/v6.2
ultralytics/utils/export.py
CHANGED
@@ -1,8 +1,9 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

+from __future__ import annotations
+
 import json
 from pathlib import Path
-from typing import Dict, List, Optional, Tuple, Union

 import torch

@@ -14,9 +15,9 @@ def export_onnx(
     im: torch.Tensor,
     onnx_file: str,
     opset: int = 14,
-    input_names: List[str] = ["images"],
-    output_names: List[str] = ["output0"],
-    dynamic: Union[bool, Dict] = False,
+    input_names: list[str] = ["images"],
+    output_names: list[str] = ["output0"],
+    dynamic: bool | dict = False,
 ) -> None:
     """
     Export a PyTorch model to ONNX format.
@@ -48,15 +49,15 @@ def export_onnx(

 def export_engine(
     onnx_file: str,
-    engine_file: Optional[str] = None,
-    workspace: Optional[int] = None,
+    engine_file: str | None = None,
+    workspace: int | None = None,
     half: bool = False,
     int8: bool = False,
     dynamic: bool = False,
-    shape: Tuple[int, int, int, int] = (1, 3, 640, 640),
-    dla: Optional[int] = None,
+    shape: tuple[int, int, int, int] = (1, 3, 640, 640),
+    dla: int | None = None,
     dataset=None,
-    metadata: Optional[Dict] = None,
+    metadata: dict | None = None,
     verbose: bool = False,
     prefix: str = "",
 ) -> None:
@@ -196,7 +197,7 @@ def export_engine(
             """Get the batch size to use for calibration."""
             return self.batch or 1

-        def get_batch(self, names) -> Optional[List[int]]:
+        def get_batch(self, names) -> list[int] | None:
             """Get the next batch to use for calibration, as a list of device memory pointers."""
             try:
                 im0s = next(self.data_iter)["img"] / 255.0
@@ -206,7 +207,7 @@ def export_engine(
             # Return None to signal to TensorRT there is no calibration data remaining
             return None

-        def read_calibration_cache(self) -> Optional[bytes]:
+        def read_calibration_cache(self) -> bytes | None:
             """Use existing cache instead of calibrating again, otherwise, implicitly return None."""
             if self.cache.exists() and self.cache.suffix == ".cache":
                 return self.cache.read_bytes()
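`export_onnx` now types `input_names`/`output_names` as `list[str]` and `dynamic` as `bool | dict`. The diff does not show how `dynamic` is consumed inside the wrapper; for context only, the underlying `torch.onnx.export` API that ONNX exporters like this typically call takes the same name lists plus a `dynamic_axes` dict keyed by those names. The snippet below is plain PyTorch usage (and needs the `onnx` package installed), not the ultralytics wrapper itself:

```python
import torch

# Tiny stand-in model; the real exporter receives a YOLO model and a preprocessed image tensor.
model = torch.nn.Conv2d(3, 16, 3, padding=1)
im = torch.zeros(1, 3, 640, 640)

torch.onnx.export(
    model,
    im,
    "model.onnx",
    opset_version=14,
    input_names=["images"],  # matches the list[str] defaults above
    output_names=["output0"],
    dynamic_axes={"images": {0: "batch"}, "output0": {0: "batch"}},  # dict form of dynamic shapes
)
```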