dgenerate-ultralytics-headless 8.3.190__py3-none-any.whl → 8.3.192__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dgenerate_ultralytics_headless-8.3.190.dist-info → dgenerate_ultralytics_headless-8.3.192.dist-info}/METADATA +1 -1
- {dgenerate_ultralytics_headless-8.3.190.dist-info → dgenerate_ultralytics_headless-8.3.192.dist-info}/RECORD +103 -102
- tests/test_cuda.py +6 -5
- tests/test_exports.py +1 -6
- tests/test_python.py +1 -4
- tests/test_solutions.py +1 -1
- ultralytics/__init__.py +1 -1
- ultralytics/cfg/__init__.py +16 -14
- ultralytics/cfg/datasets/SKU-110K.yaml +1 -1
- ultralytics/cfg/datasets/VisDrone.yaml +4 -4
- ultralytics/data/annotator.py +6 -6
- ultralytics/data/augment.py +53 -51
- ultralytics/data/base.py +15 -13
- ultralytics/data/build.py +7 -4
- ultralytics/data/converter.py +9 -10
- ultralytics/data/dataset.py +24 -22
- ultralytics/data/loaders.py +13 -11
- ultralytics/data/split.py +4 -3
- ultralytics/data/split_dota.py +14 -12
- ultralytics/data/utils.py +29 -23
- ultralytics/engine/exporter.py +2 -2
- ultralytics/engine/model.py +16 -14
- ultralytics/engine/predictor.py +8 -6
- ultralytics/engine/results.py +54 -52
- ultralytics/engine/trainer.py +8 -3
- ultralytics/engine/tuner.py +230 -42
- ultralytics/hub/google/__init__.py +7 -6
- ultralytics/hub/session.py +8 -6
- ultralytics/hub/utils.py +3 -4
- ultralytics/models/fastsam/model.py +8 -6
- ultralytics/models/nas/model.py +5 -3
- ultralytics/models/rtdetr/train.py +4 -3
- ultralytics/models/rtdetr/val.py +6 -4
- ultralytics/models/sam/amg.py +13 -10
- ultralytics/models/sam/model.py +3 -2
- ultralytics/models/sam/modules/blocks.py +21 -21
- ultralytics/models/sam/modules/decoders.py +11 -11
- ultralytics/models/sam/modules/encoders.py +25 -25
- ultralytics/models/sam/modules/memory_attention.py +9 -8
- ultralytics/models/sam/modules/sam.py +8 -10
- ultralytics/models/sam/modules/tiny_encoder.py +21 -20
- ultralytics/models/sam/modules/transformer.py +6 -5
- ultralytics/models/sam/modules/utils.py +7 -5
- ultralytics/models/sam/predict.py +32 -31
- ultralytics/models/utils/loss.py +29 -27
- ultralytics/models/utils/ops.py +10 -8
- ultralytics/models/yolo/classify/train.py +9 -7
- ultralytics/models/yolo/classify/val.py +11 -9
- ultralytics/models/yolo/detect/predict.py +1 -1
- ultralytics/models/yolo/detect/train.py +8 -6
- ultralytics/models/yolo/detect/val.py +22 -20
- ultralytics/models/yolo/model.py +14 -14
- ultralytics/models/yolo/obb/train.py +5 -3
- ultralytics/models/yolo/obb/val.py +11 -9
- ultralytics/models/yolo/pose/train.py +7 -5
- ultralytics/models/yolo/pose/val.py +12 -10
- ultralytics/models/yolo/segment/train.py +4 -5
- ultralytics/models/yolo/segment/val.py +13 -11
- ultralytics/models/yolo/world/train.py +10 -8
- ultralytics/models/yolo/yoloe/train.py +10 -10
- ultralytics/models/yolo/yoloe/val.py +11 -9
- ultralytics/nn/autobackend.py +17 -19
- ultralytics/nn/modules/block.py +12 -12
- ultralytics/nn/modules/conv.py +4 -3
- ultralytics/nn/modules/head.py +41 -37
- ultralytics/nn/modules/transformer.py +22 -21
- ultralytics/nn/tasks.py +2 -2
- ultralytics/nn/text_model.py +6 -5
- ultralytics/solutions/analytics.py +7 -5
- ultralytics/solutions/config.py +12 -10
- ultralytics/solutions/distance_calculation.py +3 -3
- ultralytics/solutions/heatmap.py +4 -2
- ultralytics/solutions/object_counter.py +5 -3
- ultralytics/solutions/parking_management.py +4 -2
- ultralytics/solutions/region_counter.py +7 -5
- ultralytics/solutions/similarity_search.py +5 -3
- ultralytics/solutions/solutions.py +38 -36
- ultralytics/solutions/streamlit_inference.py +8 -7
- ultralytics/trackers/bot_sort.py +11 -9
- ultralytics/trackers/byte_tracker.py +17 -15
- ultralytics/trackers/utils/gmc.py +4 -3
- ultralytics/utils/__init__.py +16 -88
- ultralytics/utils/autobatch.py +3 -2
- ultralytics/utils/autodevice.py +10 -10
- ultralytics/utils/benchmarks.py +11 -10
- ultralytics/utils/callbacks/comet.py +9 -9
- ultralytics/utils/checks.py +17 -26
- ultralytics/utils/export.py +12 -11
- ultralytics/utils/files.py +8 -7
- ultralytics/utils/git.py +139 -0
- ultralytics/utils/instance.py +8 -7
- ultralytics/utils/loss.py +15 -13
- ultralytics/utils/metrics.py +62 -62
- ultralytics/utils/ops.py +3 -2
- ultralytics/utils/patches.py +6 -4
- ultralytics/utils/plotting.py +20 -18
- ultralytics/utils/torch_utils.py +4 -2
- ultralytics/utils/tqdm.py +18 -14
- ultralytics/utils/triton.py +3 -2
- {dgenerate_ultralytics_headless-8.3.190.dist-info → dgenerate_ultralytics_headless-8.3.192.dist-info}/WHEEL +0 -0
- {dgenerate_ultralytics_headless-8.3.190.dist-info → dgenerate_ultralytics_headless-8.3.192.dist-info}/entry_points.txt +0 -0
- {dgenerate_ultralytics_headless-8.3.190.dist-info → dgenerate_ultralytics_headless-8.3.192.dist-info}/licenses/LICENSE +0 -0
- {dgenerate_ultralytics_headless-8.3.190.dist-info → dgenerate_ultralytics_headless-8.3.192.dist-info}/top_level.txt +0 -0
ultralytics/utils/__init__.py
CHANGED

@@ -1,5 +1,7 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
 
+from __future__ import annotations
+
 import contextlib
 import importlib.metadata
 import inspect
@@ -9,7 +11,6 @@ import os
 import platform
 import re
 import socket
-import subprocess
 import sys
 import threading
 import time
@@ -17,7 +18,6 @@ from functools import lru_cache
 from pathlib import Path
 from threading import Lock
 from types import SimpleNamespace
-from typing import Union
 from urllib.parse import unquote
 
 import cv2
@@ -25,6 +25,7 @@ import numpy as np
 import torch
 
 from ultralytics import __version__
+from ultralytics.utils.git import GitRepo
 from ultralytics.utils.patches import imread, imshow, imwrite, torch_save  # for patches
 from ultralytics.utils.tqdm import TQDM  # noqa
 
@@ -191,9 +192,10 @@ class DataExportMixin:
 def _to_str_simple(v):
 if v is None:
 return ""
-
+elif isinstance(v, (dict, list, tuple, set)):
 return repr(v)
-
+else:
+return str(v)
 
 df_str = df.select(
 [pl.col(c).map_elements(_to_str_simple, return_dtype=pl.String).alias(c) for c in df.columns]
@@ -415,10 +417,10 @@ def set_logging(name="LOGGING_NAME", verbose=True):
 """Format log records with prefixes based on level."""
 # Apply prefixes based on log level
 if record.levelno == logging.WARNING:
-prefix = "WARNING
+prefix = "WARNING" if WINDOWS else "WARNING ⚠️"
 record.msg = f"{prefix} {record.msg}"
 elif record.levelno == logging.ERROR:
-prefix = "ERROR
+prefix = "ERROR" if WINDOWS else "ERROR ❌"
 record.msg = f"{prefix} {record.msg}"
 
 # Handle emojis in message based on platform
@@ -429,7 +431,7 @@ def set_logging(name="LOGGING_NAME", verbose=True):
 
 # Handle Windows UTF-8 encoding issues
 if WINDOWS and hasattr(sys.stdout, "encoding") and sys.stdout.encoding != "utf-8":
-
+with contextlib.suppress(Exception):
 # Attempt to reconfigure stdout to use UTF-8 encoding if possible
 if hasattr(sys.stdout, "reconfigure"):
 sys.stdout.reconfigure(encoding="utf-8")
@@ -438,8 +440,6 @@ def set_logging(name="LOGGING_NAME", verbose=True):
 import io
 
 sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8")
-except Exception:
-pass
 
 # Create and configure the StreamHandler with the appropriate formatter and level
 stream_handler = logging.StreamHandler(sys.stdout)
@@ -790,7 +790,7 @@ def is_pip_package(filepath: str = __name__) -> bool:
 return spec is not None and spec.origin is not None
 
 
-def is_dir_writeable(dir_path:
+def is_dir_writeable(dir_path: str | Path) -> bool:
 """
 Check if a directory is writeable.
 
@@ -823,77 +823,6 @@ def is_github_action_running() -> bool:
 return "GITHUB_ACTIONS" in os.environ and "GITHUB_WORKFLOW" in os.environ and "RUNNER_OS" in os.environ
 
 
-def get_git_dir():
-"""
-Determine whether the current file is part of a git repository and if so, return the repository root directory.
-
-Returns:
-(Path | None): Git root directory if found or None if not found.
-"""
-for d in Path(__file__).parents:
-if (d / ".git").is_dir():
-return d
-
-
-def is_git_dir():
-"""
-Determine whether the current file is part of a git repository.
-
-Returns:
-(bool): True if current file is part of a git repository.
-"""
-return GIT_DIR is not None
-
-
-@lru_cache(maxsize=1)
-def get_git_origin_url():
-"""
-Retrieve the origin URL of a git repository.
-
-Returns:
-(str | None): The origin URL of the git repository or None if not git directory.
-"""
-if IS_GIT_DIR:
-try:
-return subprocess.check_output(
-["git", "config", "--get", "remote.origin.url"], stderr=subprocess.DEVNULL, text=True
-).strip()
-except subprocess.CalledProcessError:
-return None
-
-
-@lru_cache(maxsize=1)
-def get_git_branch():
-"""
-Return the current git branch name. If not in a git repository, return None.
-
-Returns:
-(str | None): The current git branch name or None if not a git directory.
-"""
-if IS_GIT_DIR:
-try:
-return subprocess.check_output(
-["git", "rev-parse", "--abbrev-ref", "HEAD"], stderr=subprocess.DEVNULL, text=True
-).strip()
-except subprocess.CalledProcessError:
-return None
-
-
-@lru_cache(maxsize=1)
-def get_git_commit():
-"""
-Return the current git commit hash. If not in a git repository, return None.
-
-Returns:
-(str | None): The current git commit hash or None if not a git directory.
-"""
-if IS_GIT_DIR:
-try:
-return subprocess.check_output(["git", "rev-parse", "HEAD"], stderr=subprocess.DEVNULL, text=True).strip()
-except subprocess.CalledProcessError:
-return None
-
-
 def get_default_args(func):
 """
 Return a dictionary of default arguments for a function.
@@ -966,8 +895,7 @@ IS_JETSON = is_jetson()
 IS_JUPYTER = is_jupyter()
 IS_PIP_PACKAGE = is_pip_package()
 IS_RASPBERRYPI = is_raspberrypi()
-
-IS_GIT_DIR = is_git_dir()
+GIT = GitRepo()
 USER_CONFIG_DIR = Path(os.getenv("YOLO_CONFIG_DIR") or get_user_config_dir())  # Ultralytics settings dir
 SETTINGS_FILE = USER_CONFIG_DIR / "settings.json"
 
@@ -1190,7 +1118,7 @@ def set_sentry():
 or TESTS_RUNNING
 or not ONLINE
 or not IS_PIP_PACKAGE
-or
+or GIT.is_repo
 ):
 return
 # If sentry_sdk package is not installed then return and do not use Sentry
@@ -1218,7 +1146,7 @@ def set_sentry():
 event["tags"] = {
 "sys_argv": ARGV[0],
 "sys_argv_name": Path(ARGV[0]).name,
-"install": "git" if
+"install": "git" if GIT.is_repo else "pip" if IS_PIP_PACKAGE else "other",
 "os": ENVIRONMENT,
 }
 return event
@@ -1265,7 +1193,7 @@ class JSONDict(dict):
 >>> json_dict.clear()
 """
 
-def __init__(self, file_path:
+def __init__(self, file_path: str | Path = "data.json"):
 """Initialize a JSONDict object with a specified file path for JSON persistence."""
 super().__init__()
 self.file_path = Path(file_path)
@@ -1363,8 +1291,8 @@ class SettingsManager(JSONDict):
 
 from ultralytics.utils.torch_utils import torch_distributed_zero_first
 
-root =
-datasets_root = (root.parent if
+root = GIT.root or Path()
+datasets_root = (root.parent if GIT.root and is_dir_writeable(root.parent) else root).resolve()
 
 self.file = Path(file)
 self.version = version
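The git helpers deleted above (get_git_dir, is_git_dir, get_git_origin_url, get_git_branch, get_git_commit) are superseded by the new ultralytics/utils/git.py module (+139 lines, not included in this section) and the module-level singleton GIT = GitRepo(). Only the is_repo and root members are confirmed by the hunks above; the sketch below is an assumption about what such a wrapper could look like, not the actual implementation:

from __future__ import annotations

import subprocess
from functools import cached_property
from pathlib import Path


class GitRepo:
    """Hypothetical sketch of a git metadata wrapper; members beyond is_repo/root are assumptions."""

    def __init__(self, path: str | Path = __file__):
        self.path = Path(path).resolve()

    @cached_property
    def root(self) -> Path | None:
        """Repository root directory, or None when this file is not inside a git checkout."""
        return next((d for d in self.path.parents if (d / ".git").is_dir()), None)

    @property
    def is_repo(self) -> bool:
        """True when a .git directory was found above this file."""
        return self.root is not None

    def _cmd(self, *args: str) -> str | None:
        """Run a git command in the repo root and return stripped stdout, or None on any failure."""
        if not self.is_repo:
            return None
        try:
            return subprocess.check_output(
                ["git", "-C", str(self.root), *args], text=True, stderr=subprocess.DEVNULL
            ).strip()
        except (subprocess.CalledProcessError, FileNotFoundError):
            return None

    @cached_property
    def origin(self) -> str | None:
        """Remote origin URL, covering what the removed get_git_origin_url() returned."""
        return self._cmd("config", "--get", "remote.origin.url")

    @cached_property
    def branch(self) -> str | None:
        """Current branch name, covering what the removed get_git_branch() returned."""
        return self._cmd("rev-parse", "--abbrev-ref", "HEAD")

    @cached_property
    def commit(self) -> str | None:
        """Current commit hash, covering what the removed get_git_commit() returned."""
        return self._cmd("rev-parse", "HEAD")


GIT = GitRepo()  # module-level singleton, mirroring the new assignment in utils/__init__.py

Centralizing the git subprocess calls behind one cached object is also why the subprocess import and the @lru_cache-decorated helpers could be dropped from utils/__init__.py entirely.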
ultralytics/utils/autobatch.py
CHANGED

@@ -1,9 +1,10 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
 """Functions for estimating the best YOLO batch size to use a fraction of the available CUDA memory in PyTorch."""
 
+from __future__ import annotations
+
 import os
 from copy import deepcopy
-from typing import Union
 
 import numpy as np
 import torch
@@ -16,7 +17,7 @@ def check_train_batch_size(
 model: torch.nn.Module,
 imgsz: int = 640,
 amp: bool = True,
-batch:
+batch: int | float = -1,
 max_num_obj: int = 1,
 ) -> int:
 """
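The same two-line change recurs across the remaining files: from __future__ import annotations is added while the typing.Union/Optional/List/Tuple imports are dropped, so PEP 604 unions such as batch: int | float = -1 can be written in signatures yet the modules still import on Python 3.8/3.9, because annotations are stored as strings instead of being evaluated at import time. A minimal standalone illustration (hypothetical function, not taken from the package):

from __future__ import annotations  # PEP 563: annotations become strings, not evaluated at import time

from pathlib import Path


def load_config(path: str | Path, batch: int | float = -1) -> dict:
    """PEP 604 `X | Y` unions in this signature work even on Python 3.8/3.9 thanks to the future import."""
    return {"path": Path(path), "batch": batch}


print(load_config("data.yaml"))

Without the future import, an annotation like str | Path would raise a TypeError at function definition on pre-3.10 interpreters.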
ultralytics/utils/autodevice.py
CHANGED

@@ -1,6 +1,8 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
 
-from
+from __future__ import annotations
+
+from typing import Any
 
 from ultralytics.utils import LOGGER
 from ultralytics.utils.checks import check_requirements
@@ -44,9 +46,9 @@ class GPUInfo:
 
 def __init__(self):
 """Initialize GPUInfo, attempting to import and initialize pynvml."""
-self.pynvml:
+self.pynvml: Any | None = None
 self.nvml_available: bool = False
-self.gpu_stats:
+self.gpu_stats: list[dict[str, Any]] = []
 
 try:
 check_requirements("nvidia-ml-py>=12.0.0")
@@ -78,13 +80,12 @@ class GPUInfo:
 
 try:
 device_count = self.pynvml.nvmlDeviceGetCount()
-for i in range(device_count)
-self.gpu_stats.append(self._get_device_stats(i))
+self.gpu_stats.extend(self._get_device_stats(i) for i in range(device_count))
 except Exception as e:
 LOGGER.warning(f"Error during device query: {e}")
 self.gpu_stats = []
 
-def _get_device_stats(self, index: int) ->
+def _get_device_stats(self, index: int) -> dict[str, Any]:
 """Get stats for a single GPU device."""
 handle = self.pynvml.nvmlDeviceGetHandleByIndex(index)
 memory = self.pynvml.nvmlDeviceGetMemoryInfo(handle)
@@ -135,7 +136,7 @@ class GPUInfo:
 
 def select_idle_gpu(
 self, count: int = 1, min_memory_fraction: float = 0, min_util_fraction: float = 0
-) ->
+) -> list[int]:
 """
 Select the most idle GPUs based on utilization and free memory.
 
@@ -195,12 +196,11 @@ if __name__ == "__main__":
 gpu_info = GPUInfo()
 gpu_info.print_status()
 
-selected
+if selected := gpu_info.select_idle_gpu(
 count=num_gpus_to_select,
 min_memory_fraction=required_free_mem_fraction,
 min_util_fraction=required_free_util_fraction,
-)
-if selected:
+):
 print(f"\n==> Using selected GPU indices: {selected}")
 devices = [f"cuda:{idx}" for idx in selected]
 print(f"    Target devices: {devices}")
ultralytics/utils/benchmarks.py
CHANGED

@@ -27,6 +27,8 @@ IMX | `imx` | yolo11n_imx_model/
 RKNN | `rknn` | yolo11n_rknn_model/
 """
 
+from __future__ import annotations
+
 import glob
 import os
 import platform
@@ -34,7 +36,6 @@ import re
 import shutil
 import time
 from pathlib import Path
-from typing import List, Optional, Tuple, Union
 
 import numpy as np
 import torch.cuda
@@ -400,14 +401,14 @@ class ProfileModels:
 
 def __init__(
 self,
-paths:
+paths: list[str],
 num_timed_runs: int = 100,
 num_warmup_runs: int = 10,
 min_time: float = 60,
 imgsz: int = 640,
 half: bool = True,
 trt: bool = True,
-device:
+device: torch.device | str | None = None,
 ):
 """
 Initialize the ProfileModels class for profiling models.
@@ -650,9 +651,9 @@ class ProfileModels:
 def generate_table_row(
 self,
 model_name: str,
-t_onnx:
-t_engine:
-model_info:
+t_onnx: tuple[float, float],
+t_engine: tuple[float, float],
+model_info: tuple[float, float, float, float],
 ):
 """
 Generate a table row string with model performance metrics.
@@ -675,9 +676,9 @@ class ProfileModels:
 @staticmethod
 def generate_results_dict(
 model_name: str,
-t_onnx:
-t_engine:
-model_info:
+t_onnx: tuple[float, float],
+t_engine: tuple[float, float],
+model_info: tuple[float, float, float, float],
 ):
 """
 Generate a dictionary of profiling results.
@@ -701,7 +702,7 @@ class ProfileModels:
 }
 
 @staticmethod
-def print_table(table_rows:
+def print_table(table_rows: list[str]):
 """
 Print a formatted table of model profiling results.
 

ultralytics/utils/callbacks/comet.py
CHANGED

@@ -1,8 +1,10 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
 
+from __future__ import annotations
+
 from collections.abc import Callable
 from types import SimpleNamespace
-from typing import Any
+from typing import Any
 
 import cv2
 import numpy as np
@@ -147,7 +149,7 @@ def _fetch_trainer_metadata(trainer) -> dict:
 
 def _scale_bounding_box_to_original_image_shape(
 box, resized_image_shape, original_image_shape, ratio_pad
-) ->
+) -> list[float]:
 """
 Scale bounding box from resized image coordinates to original image coordinates.
 
@@ -178,7 +180,7 @@ def _scale_bounding_box_to_original_image_shape(
 return box
 
 
-def _format_ground_truth_annotations_for_detection(img_idx, image_path, batch, class_name_map=None) ->
+def _format_ground_truth_annotations_for_detection(img_idx, image_path, batch, class_name_map=None) -> dict | None:
 """
 Format ground truth annotations for object detection.
 
@@ -233,7 +235,7 @@ def _format_ground_truth_annotations_for_detection(img_idx, image_path, batch, c
 return {"name": "ground_truth", "data": data}
 
 
-def _format_prediction_annotations(image_path, metadata, class_label_map=None, class_map=None) ->
+def _format_prediction_annotations(image_path, metadata, class_label_map=None, class_map=None) -> dict | None:
 """
 Format YOLO predictions for object detection visualization.
 
@@ -286,7 +288,7 @@ def _format_prediction_annotations(image_path, metadata, class_label_map=None, c
 return {"name": "prediction", "data": data}
 
 
-def _extract_segmentation_annotation(segmentation_raw: str, decode: Callable) ->
+def _extract_segmentation_annotation(segmentation_raw: str, decode: Callable) -> list[list[Any]] | None:
 """
 Extract segmentation annotation from compressed segmentations as list of polygons.
 
@@ -307,9 +309,7 @@ def _extract_segmentation_annotation(segmentation_raw: str, decode: Callable) ->
 return None
 
 
-def _fetch_annotations(
-img_idx, image_path, batch, prediction_metadata_map, class_label_map, class_map
-) -> Optional[List]:
+def _fetch_annotations(img_idx, image_path, batch, prediction_metadata_map, class_label_map, class_map) -> list | None:
 """
 Join the ground truth and prediction annotations if they exist.
 
@@ -356,7 +356,7 @@ def _log_confusion_matrix(experiment, trainer, curr_step, curr_epoch) -> None:
 )
 
 
-def _log_images(experiment, image_paths, curr_step:
+def _log_images(experiment, image_paths, curr_step: int | None, annotations=None) -> None:
 """
 Log images to the experiment with optional annotations.
 
ultralytics/utils/checks.py
CHANGED

@@ -1,5 +1,7 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
 
+from __future__ import annotations
+
 import functools
 import glob
 import inspect
@@ -13,7 +15,6 @@ import time
 from importlib import metadata
 from pathlib import Path
 from types import SimpleNamespace
-from typing import Optional
 
 import cv2
 import numpy as np
@@ -23,8 +24,8 @@ from ultralytics.utils import (
 ARM64,
 ASSETS,
 AUTOINSTALL,
+GIT,
 IS_COLAB,
-IS_GIT_DIR,
 IS_JETSON,
 IS_KAGGLE,
 IS_PIP_PACKAGE,
@@ -401,16 +402,22 @@ def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=()
 def attempt_install(packages, commands, use_uv):
 """Attempt package installation with uv if available, falling back to pip."""
 if use_uv:
-base =
+base = (
+f"uv pip install --no-cache-dir {packages} {commands} "
+f"--index-strategy=unsafe-best-match --break-system-packages --prerelease=allow"
+)
 try:
-return subprocess.check_output(base, shell=True, stderr=subprocess.PIPE)
+return subprocess.check_output(base, shell=True, stderr=subprocess.PIPE, text=True)
 except subprocess.CalledProcessError as e:
-if e.stderr and "No virtual environment found" in e.stderr
+if e.stderr and "No virtual environment found" in e.stderr:
 return subprocess.check_output(
-base.replace("uv pip install", "uv pip install --system"),
-
+base.replace("uv pip install", "uv pip install --system"),
+shell=True,
+stderr=subprocess.PIPE,
+text=True,
+)
 raise
-return subprocess.check_output(f"pip install --no-cache-dir {packages} {commands}", shell=True)
+return subprocess.check_output(f"pip install --no-cache-dir {packages} {commands}", shell=True, text=True)
 
 s = " ".join(f'"{x}"' for x in pkgs)  # console string
 if s:
@@ -684,7 +691,7 @@ def collect_system_info():
 "OS": platform.platform(),
 "Environment": ENVIRONMENT,
 "Python": PYTHON_VERSION,
-"Install": "git" if
+"Install": "git" if GIT.is_repo else "pip" if IS_PIP_PACKAGE else "other",
 "Path": str(ROOT),
 "RAM": f"{psutil.virtual_memory().total / gib:.2f} GB",
 "Disk": f"{(total - free) / gib:.1f}/{total / gib:.1f} GB",
@@ -797,23 +804,7 @@ def check_amp(model):
 return True
 
 
-def
-"""
-Return human-readable git description, i.e. v5.0-5-g3e25f1e https://git-scm.com/docs/git-describe.
-
-Args:
-path (Path): Path to git repository.
-
-Returns:
-(str): Human-readable git description.
-"""
-try:
-return subprocess.check_output(f"git -C {path} describe --tags --long --always", shell=True).decode()[:-1]
-except Exception:
-return ""
-
-
-def print_args(args: Optional[dict] = None, show_file=True, show_func=False):
+def print_args(args: dict | None = None, show_file=True, show_func=False):
 """
 Print function arguments (optional args dict).
 
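A side effect of adding text=True in the attempt_install hunk above: subprocess.check_output then returns str, and CalledProcessError.stderr is also str, so the "No virtual environment found" substring test works without decoding bytes. A condensed, hedged restatement of that retry logic (the command string below is illustrative only, not the package's exact string):

import subprocess

cmd = "uv pip install --no-cache-dir numpy"  # illustrative command only
try:
    out = subprocess.check_output(cmd, shell=True, stderr=subprocess.PIPE, text=True)
except subprocess.CalledProcessError as e:
    # With text=True, e.stderr is already a str, so no .decode() call is needed here
    if e.stderr and "No virtual environment found" in e.stderr:
        out = subprocess.check_output(
            cmd.replace("uv pip install", "uv pip install --system"), shell=True, stderr=subprocess.PIPE, text=True
        )
    else:
        raise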
ultralytics/utils/export.py
CHANGED

@@ -1,8 +1,9 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
 
+from __future__ import annotations
+
 import json
 from pathlib import Path
-from typing import Dict, List, Optional, Tuple, Union
 
 import torch
 
@@ -14,9 +15,9 @@ def export_onnx(
 im: torch.Tensor,
 onnx_file: str,
 opset: int = 14,
-input_names:
-output_names:
-dynamic:
+input_names: list[str] = ["images"],
+output_names: list[str] = ["output0"],
+dynamic: bool | dict = False,
 ) -> None:
 """
 Export a PyTorch model to ONNX format.
@@ -48,15 +49,15 @@ def export_onnx(
 
 def export_engine(
 onnx_file: str,
-engine_file:
-workspace:
+engine_file: str | None = None,
+workspace: int | None = None,
 half: bool = False,
 int8: bool = False,
 dynamic: bool = False,
-shape:
-dla:
+shape: tuple[int, int, int, int] = (1, 3, 640, 640),
+dla: int | None = None,
 dataset=None,
-metadata:
+metadata: dict | None = None,
 verbose: bool = False,
 prefix: str = "",
 ) -> None:
@@ -196,7 +197,7 @@ def export_engine(
 """Get the batch size to use for calibration."""
 return self.batch or 1
 
-def get_batch(self, names) ->
+def get_batch(self, names) -> list[int] | None:
 """Get the next batch to use for calibration, as a list of device memory pointers."""
 try:
 im0s = next(self.data_iter)["img"] / 255.0
@@ -206,7 +207,7 @@ def export_engine(
 # Return None to signal to TensorRT there is no calibration data remaining
 return None
 
-def read_calibration_cache(self) ->
+def read_calibration_cache(self) -> bytes | None:
 """Use existing cache instead of calibrating again, otherwise, implicitly return None."""
 if self.cache.exists() and self.cache.suffix == ".cache":
 return self.cache.read_bytes()
ultralytics/utils/files.py
CHANGED

@@ -1,5 +1,7 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
 
+from __future__ import annotations
+
 import contextlib
 import glob
 import os
@@ -8,7 +10,6 @@ import tempfile
 from contextlib import contextmanager
 from datetime import datetime
 from pathlib import Path
-from typing import Union
 
 
 class WorkingDirectory(contextlib.ContextDecorator):
@@ -39,7 +40,7 @@ class WorkingDirectory(contextlib.ContextDecorator):
 >>> pass
 """
 
-def __init__(self, new_dir:
+def __init__(self, new_dir: str | Path):
 """Initialize the WorkingDirectory context manager with the target directory."""
 self.dir = new_dir  # new dir
 self.cwd = Path.cwd().resolve()  # current dir
@@ -54,7 +55,7 @@ class WorkingDirectory(contextlib.ContextDecorator):
 
 
 @contextmanager
-def spaces_in_path(path:
+def spaces_in_path(path: str | Path):
 """
 Context manager to handle paths with spaces in their names.
 
@@ -105,7 +106,7 @@ def spaces_in_path(path: Union[str, Path]):
 yield path
 
 
-def increment_path(path:
+def increment_path(path: str | Path, exist_ok: bool = False, sep: str = "", mkdir: bool = False) -> Path:
 """
 Increment a file or directory path, i.e., runs/exp --> runs/exp{sep}2, runs/exp{sep}3, ... etc.
 
@@ -153,19 +154,19 @@ def increment_path(path: Union[str, Path], exist_ok: bool = False, sep: str = ""
 return path
 
 
-def file_age(path:
+def file_age(path: str | Path = __file__) -> int:
 """Return days since the last modification of the specified file."""
 dt = datetime.now() - datetime.fromtimestamp(Path(path).stat().st_mtime)  # delta
 return dt.days  # + dt.seconds / 86400  # fractional days
 
 
-def file_date(path:
+def file_date(path: str | Path = __file__) -> str:
 """Return the file modification date in 'YYYY-M-D' format."""
 t = datetime.fromtimestamp(Path(path).stat().st_mtime)
 return f"{t.year}-{t.month}-{t.day}"
 
 
-def file_size(path:
+def file_size(path: str | Path) -> float:
 """Return the size of a file or directory in megabytes (MB)."""
 if isinstance(path, (str, Path)):
 mb = 1 << 20  # bytes to MiB (1024 ** 2)