ultralytics-opencv-headless 8.3.246__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (298)
  1. tests/__init__.py +23 -0
  2. tests/conftest.py +59 -0
  3. tests/test_cli.py +131 -0
  4. tests/test_cuda.py +216 -0
  5. tests/test_engine.py +157 -0
  6. tests/test_exports.py +309 -0
  7. tests/test_integrations.py +151 -0
  8. tests/test_python.py +777 -0
  9. tests/test_solutions.py +371 -0
  10. ultralytics/__init__.py +48 -0
  11. ultralytics/assets/bus.jpg +0 -0
  12. ultralytics/assets/zidane.jpg +0 -0
  13. ultralytics/cfg/__init__.py +1026 -0
  14. ultralytics/cfg/datasets/Argoverse.yaml +78 -0
  15. ultralytics/cfg/datasets/DOTAv1.5.yaml +37 -0
  16. ultralytics/cfg/datasets/DOTAv1.yaml +36 -0
  17. ultralytics/cfg/datasets/GlobalWheat2020.yaml +68 -0
  18. ultralytics/cfg/datasets/HomeObjects-3K.yaml +32 -0
  19. ultralytics/cfg/datasets/ImageNet.yaml +2025 -0
  20. ultralytics/cfg/datasets/Objects365.yaml +447 -0
  21. ultralytics/cfg/datasets/SKU-110K.yaml +58 -0
  22. ultralytics/cfg/datasets/VOC.yaml +102 -0
  23. ultralytics/cfg/datasets/VisDrone.yaml +87 -0
  24. ultralytics/cfg/datasets/african-wildlife.yaml +25 -0
  25. ultralytics/cfg/datasets/brain-tumor.yaml +22 -0
  26. ultralytics/cfg/datasets/carparts-seg.yaml +44 -0
  27. ultralytics/cfg/datasets/coco-pose.yaml +64 -0
  28. ultralytics/cfg/datasets/coco.yaml +118 -0
  29. ultralytics/cfg/datasets/coco128-seg.yaml +101 -0
  30. ultralytics/cfg/datasets/coco128.yaml +101 -0
  31. ultralytics/cfg/datasets/coco8-grayscale.yaml +103 -0
  32. ultralytics/cfg/datasets/coco8-multispectral.yaml +104 -0
  33. ultralytics/cfg/datasets/coco8-pose.yaml +47 -0
  34. ultralytics/cfg/datasets/coco8-seg.yaml +101 -0
  35. ultralytics/cfg/datasets/coco8.yaml +101 -0
  36. ultralytics/cfg/datasets/construction-ppe.yaml +32 -0
  37. ultralytics/cfg/datasets/crack-seg.yaml +22 -0
  38. ultralytics/cfg/datasets/dog-pose.yaml +52 -0
  39. ultralytics/cfg/datasets/dota8-multispectral.yaml +38 -0
  40. ultralytics/cfg/datasets/dota8.yaml +35 -0
  41. ultralytics/cfg/datasets/hand-keypoints.yaml +50 -0
  42. ultralytics/cfg/datasets/kitti.yaml +27 -0
  43. ultralytics/cfg/datasets/lvis.yaml +1240 -0
  44. ultralytics/cfg/datasets/medical-pills.yaml +21 -0
  45. ultralytics/cfg/datasets/open-images-v7.yaml +663 -0
  46. ultralytics/cfg/datasets/package-seg.yaml +22 -0
  47. ultralytics/cfg/datasets/signature.yaml +21 -0
  48. ultralytics/cfg/datasets/tiger-pose.yaml +41 -0
  49. ultralytics/cfg/datasets/xView.yaml +155 -0
  50. ultralytics/cfg/default.yaml +130 -0
  51. ultralytics/cfg/models/11/yolo11-cls-resnet18.yaml +17 -0
  52. ultralytics/cfg/models/11/yolo11-cls.yaml +33 -0
  53. ultralytics/cfg/models/11/yolo11-obb.yaml +50 -0
  54. ultralytics/cfg/models/11/yolo11-pose.yaml +51 -0
  55. ultralytics/cfg/models/11/yolo11-seg.yaml +50 -0
  56. ultralytics/cfg/models/11/yolo11.yaml +50 -0
  57. ultralytics/cfg/models/11/yoloe-11-seg.yaml +48 -0
  58. ultralytics/cfg/models/11/yoloe-11.yaml +48 -0
  59. ultralytics/cfg/models/12/yolo12-cls.yaml +32 -0
  60. ultralytics/cfg/models/12/yolo12-obb.yaml +48 -0
  61. ultralytics/cfg/models/12/yolo12-pose.yaml +49 -0
  62. ultralytics/cfg/models/12/yolo12-seg.yaml +48 -0
  63. ultralytics/cfg/models/12/yolo12.yaml +48 -0
  64. ultralytics/cfg/models/rt-detr/rtdetr-l.yaml +53 -0
  65. ultralytics/cfg/models/rt-detr/rtdetr-resnet101.yaml +45 -0
  66. ultralytics/cfg/models/rt-detr/rtdetr-resnet50.yaml +45 -0
  67. ultralytics/cfg/models/rt-detr/rtdetr-x.yaml +57 -0
  68. ultralytics/cfg/models/v10/yolov10b.yaml +45 -0
  69. ultralytics/cfg/models/v10/yolov10l.yaml +45 -0
  70. ultralytics/cfg/models/v10/yolov10m.yaml +45 -0
  71. ultralytics/cfg/models/v10/yolov10n.yaml +45 -0
  72. ultralytics/cfg/models/v10/yolov10s.yaml +45 -0
  73. ultralytics/cfg/models/v10/yolov10x.yaml +45 -0
  74. ultralytics/cfg/models/v3/yolov3-spp.yaml +49 -0
  75. ultralytics/cfg/models/v3/yolov3-tiny.yaml +40 -0
  76. ultralytics/cfg/models/v3/yolov3.yaml +49 -0
  77. ultralytics/cfg/models/v5/yolov5-p6.yaml +62 -0
  78. ultralytics/cfg/models/v5/yolov5.yaml +51 -0
  79. ultralytics/cfg/models/v6/yolov6.yaml +56 -0
  80. ultralytics/cfg/models/v8/yoloe-v8-seg.yaml +48 -0
  81. ultralytics/cfg/models/v8/yoloe-v8.yaml +48 -0
  82. ultralytics/cfg/models/v8/yolov8-cls-resnet101.yaml +28 -0
  83. ultralytics/cfg/models/v8/yolov8-cls-resnet50.yaml +28 -0
  84. ultralytics/cfg/models/v8/yolov8-cls.yaml +32 -0
  85. ultralytics/cfg/models/v8/yolov8-ghost-p2.yaml +58 -0
  86. ultralytics/cfg/models/v8/yolov8-ghost-p6.yaml +60 -0
  87. ultralytics/cfg/models/v8/yolov8-ghost.yaml +50 -0
  88. ultralytics/cfg/models/v8/yolov8-obb.yaml +49 -0
  89. ultralytics/cfg/models/v8/yolov8-p2.yaml +57 -0
  90. ultralytics/cfg/models/v8/yolov8-p6.yaml +59 -0
  91. ultralytics/cfg/models/v8/yolov8-pose-p6.yaml +60 -0
  92. ultralytics/cfg/models/v8/yolov8-pose.yaml +50 -0
  93. ultralytics/cfg/models/v8/yolov8-rtdetr.yaml +49 -0
  94. ultralytics/cfg/models/v8/yolov8-seg-p6.yaml +59 -0
  95. ultralytics/cfg/models/v8/yolov8-seg.yaml +49 -0
  96. ultralytics/cfg/models/v8/yolov8-world.yaml +51 -0
  97. ultralytics/cfg/models/v8/yolov8-worldv2.yaml +49 -0
  98. ultralytics/cfg/models/v8/yolov8.yaml +49 -0
  99. ultralytics/cfg/models/v9/yolov9c-seg.yaml +41 -0
  100. ultralytics/cfg/models/v9/yolov9c.yaml +41 -0
  101. ultralytics/cfg/models/v9/yolov9e-seg.yaml +64 -0
  102. ultralytics/cfg/models/v9/yolov9e.yaml +64 -0
  103. ultralytics/cfg/models/v9/yolov9m.yaml +41 -0
  104. ultralytics/cfg/models/v9/yolov9s.yaml +41 -0
  105. ultralytics/cfg/models/v9/yolov9t.yaml +41 -0
  106. ultralytics/cfg/trackers/botsort.yaml +21 -0
  107. ultralytics/cfg/trackers/bytetrack.yaml +12 -0
  108. ultralytics/data/__init__.py +26 -0
  109. ultralytics/data/annotator.py +66 -0
  110. ultralytics/data/augment.py +2801 -0
  111. ultralytics/data/base.py +435 -0
  112. ultralytics/data/build.py +437 -0
  113. ultralytics/data/converter.py +855 -0
  114. ultralytics/data/dataset.py +834 -0
  115. ultralytics/data/loaders.py +704 -0
  116. ultralytics/data/scripts/download_weights.sh +18 -0
  117. ultralytics/data/scripts/get_coco.sh +61 -0
  118. ultralytics/data/scripts/get_coco128.sh +18 -0
  119. ultralytics/data/scripts/get_imagenet.sh +52 -0
  120. ultralytics/data/split.py +138 -0
  121. ultralytics/data/split_dota.py +344 -0
  122. ultralytics/data/utils.py +798 -0
  123. ultralytics/engine/__init__.py +1 -0
  124. ultralytics/engine/exporter.py +1578 -0
  125. ultralytics/engine/model.py +1124 -0
  126. ultralytics/engine/predictor.py +508 -0
  127. ultralytics/engine/results.py +1522 -0
  128. ultralytics/engine/trainer.py +974 -0
  129. ultralytics/engine/tuner.py +448 -0
  130. ultralytics/engine/validator.py +384 -0
  131. ultralytics/hub/__init__.py +166 -0
  132. ultralytics/hub/auth.py +151 -0
  133. ultralytics/hub/google/__init__.py +174 -0
  134. ultralytics/hub/session.py +422 -0
  135. ultralytics/hub/utils.py +162 -0
  136. ultralytics/models/__init__.py +9 -0
  137. ultralytics/models/fastsam/__init__.py +7 -0
  138. ultralytics/models/fastsam/model.py +79 -0
  139. ultralytics/models/fastsam/predict.py +169 -0
  140. ultralytics/models/fastsam/utils.py +23 -0
  141. ultralytics/models/fastsam/val.py +38 -0
  142. ultralytics/models/nas/__init__.py +7 -0
  143. ultralytics/models/nas/model.py +98 -0
  144. ultralytics/models/nas/predict.py +56 -0
  145. ultralytics/models/nas/val.py +38 -0
  146. ultralytics/models/rtdetr/__init__.py +7 -0
  147. ultralytics/models/rtdetr/model.py +63 -0
  148. ultralytics/models/rtdetr/predict.py +88 -0
  149. ultralytics/models/rtdetr/train.py +89 -0
  150. ultralytics/models/rtdetr/val.py +216 -0
  151. ultralytics/models/sam/__init__.py +25 -0
  152. ultralytics/models/sam/amg.py +275 -0
  153. ultralytics/models/sam/build.py +365 -0
  154. ultralytics/models/sam/build_sam3.py +377 -0
  155. ultralytics/models/sam/model.py +169 -0
  156. ultralytics/models/sam/modules/__init__.py +1 -0
  157. ultralytics/models/sam/modules/blocks.py +1067 -0
  158. ultralytics/models/sam/modules/decoders.py +495 -0
  159. ultralytics/models/sam/modules/encoders.py +794 -0
  160. ultralytics/models/sam/modules/memory_attention.py +298 -0
  161. ultralytics/models/sam/modules/sam.py +1160 -0
  162. ultralytics/models/sam/modules/tiny_encoder.py +979 -0
  163. ultralytics/models/sam/modules/transformer.py +344 -0
  164. ultralytics/models/sam/modules/utils.py +512 -0
  165. ultralytics/models/sam/predict.py +3940 -0
  166. ultralytics/models/sam/sam3/__init__.py +3 -0
  167. ultralytics/models/sam/sam3/decoder.py +546 -0
  168. ultralytics/models/sam/sam3/encoder.py +529 -0
  169. ultralytics/models/sam/sam3/geometry_encoders.py +415 -0
  170. ultralytics/models/sam/sam3/maskformer_segmentation.py +286 -0
  171. ultralytics/models/sam/sam3/model_misc.py +199 -0
  172. ultralytics/models/sam/sam3/necks.py +129 -0
  173. ultralytics/models/sam/sam3/sam3_image.py +339 -0
  174. ultralytics/models/sam/sam3/text_encoder_ve.py +307 -0
  175. ultralytics/models/sam/sam3/vitdet.py +547 -0
  176. ultralytics/models/sam/sam3/vl_combiner.py +160 -0
  177. ultralytics/models/utils/__init__.py +1 -0
  178. ultralytics/models/utils/loss.py +466 -0
  179. ultralytics/models/utils/ops.py +315 -0
  180. ultralytics/models/yolo/__init__.py +7 -0
  181. ultralytics/models/yolo/classify/__init__.py +7 -0
  182. ultralytics/models/yolo/classify/predict.py +90 -0
  183. ultralytics/models/yolo/classify/train.py +202 -0
  184. ultralytics/models/yolo/classify/val.py +216 -0
  185. ultralytics/models/yolo/detect/__init__.py +7 -0
  186. ultralytics/models/yolo/detect/predict.py +122 -0
  187. ultralytics/models/yolo/detect/train.py +227 -0
  188. ultralytics/models/yolo/detect/val.py +507 -0
  189. ultralytics/models/yolo/model.py +430 -0
  190. ultralytics/models/yolo/obb/__init__.py +7 -0
  191. ultralytics/models/yolo/obb/predict.py +56 -0
  192. ultralytics/models/yolo/obb/train.py +79 -0
  193. ultralytics/models/yolo/obb/val.py +302 -0
  194. ultralytics/models/yolo/pose/__init__.py +7 -0
  195. ultralytics/models/yolo/pose/predict.py +65 -0
  196. ultralytics/models/yolo/pose/train.py +110 -0
  197. ultralytics/models/yolo/pose/val.py +248 -0
  198. ultralytics/models/yolo/segment/__init__.py +7 -0
  199. ultralytics/models/yolo/segment/predict.py +109 -0
  200. ultralytics/models/yolo/segment/train.py +69 -0
  201. ultralytics/models/yolo/segment/val.py +307 -0
  202. ultralytics/models/yolo/world/__init__.py +5 -0
  203. ultralytics/models/yolo/world/train.py +173 -0
  204. ultralytics/models/yolo/world/train_world.py +178 -0
  205. ultralytics/models/yolo/yoloe/__init__.py +22 -0
  206. ultralytics/models/yolo/yoloe/predict.py +162 -0
  207. ultralytics/models/yolo/yoloe/train.py +287 -0
  208. ultralytics/models/yolo/yoloe/train_seg.py +122 -0
  209. ultralytics/models/yolo/yoloe/val.py +206 -0
  210. ultralytics/nn/__init__.py +27 -0
  211. ultralytics/nn/autobackend.py +958 -0
  212. ultralytics/nn/modules/__init__.py +182 -0
  213. ultralytics/nn/modules/activation.py +54 -0
  214. ultralytics/nn/modules/block.py +1947 -0
  215. ultralytics/nn/modules/conv.py +669 -0
  216. ultralytics/nn/modules/head.py +1183 -0
  217. ultralytics/nn/modules/transformer.py +793 -0
  218. ultralytics/nn/modules/utils.py +159 -0
  219. ultralytics/nn/tasks.py +1768 -0
  220. ultralytics/nn/text_model.py +356 -0
  221. ultralytics/py.typed +1 -0
  222. ultralytics/solutions/__init__.py +41 -0
  223. ultralytics/solutions/ai_gym.py +108 -0
  224. ultralytics/solutions/analytics.py +264 -0
  225. ultralytics/solutions/config.py +107 -0
  226. ultralytics/solutions/distance_calculation.py +123 -0
  227. ultralytics/solutions/heatmap.py +125 -0
  228. ultralytics/solutions/instance_segmentation.py +86 -0
  229. ultralytics/solutions/object_blurrer.py +89 -0
  230. ultralytics/solutions/object_counter.py +190 -0
  231. ultralytics/solutions/object_cropper.py +87 -0
  232. ultralytics/solutions/parking_management.py +280 -0
  233. ultralytics/solutions/queue_management.py +93 -0
  234. ultralytics/solutions/region_counter.py +133 -0
  235. ultralytics/solutions/security_alarm.py +151 -0
  236. ultralytics/solutions/similarity_search.py +219 -0
  237. ultralytics/solutions/solutions.py +828 -0
  238. ultralytics/solutions/speed_estimation.py +114 -0
  239. ultralytics/solutions/streamlit_inference.py +260 -0
  240. ultralytics/solutions/templates/similarity-search.html +156 -0
  241. ultralytics/solutions/trackzone.py +88 -0
  242. ultralytics/solutions/vision_eye.py +67 -0
  243. ultralytics/trackers/__init__.py +7 -0
  244. ultralytics/trackers/basetrack.py +115 -0
  245. ultralytics/trackers/bot_sort.py +257 -0
  246. ultralytics/trackers/byte_tracker.py +469 -0
  247. ultralytics/trackers/track.py +116 -0
  248. ultralytics/trackers/utils/__init__.py +1 -0
  249. ultralytics/trackers/utils/gmc.py +339 -0
  250. ultralytics/trackers/utils/kalman_filter.py +482 -0
  251. ultralytics/trackers/utils/matching.py +154 -0
  252. ultralytics/utils/__init__.py +1450 -0
  253. ultralytics/utils/autobatch.py +118 -0
  254. ultralytics/utils/autodevice.py +205 -0
  255. ultralytics/utils/benchmarks.py +728 -0
  256. ultralytics/utils/callbacks/__init__.py +5 -0
  257. ultralytics/utils/callbacks/base.py +233 -0
  258. ultralytics/utils/callbacks/clearml.py +146 -0
  259. ultralytics/utils/callbacks/comet.py +625 -0
  260. ultralytics/utils/callbacks/dvc.py +197 -0
  261. ultralytics/utils/callbacks/hub.py +110 -0
  262. ultralytics/utils/callbacks/mlflow.py +134 -0
  263. ultralytics/utils/callbacks/neptune.py +126 -0
  264. ultralytics/utils/callbacks/platform.py +313 -0
  265. ultralytics/utils/callbacks/raytune.py +42 -0
  266. ultralytics/utils/callbacks/tensorboard.py +123 -0
  267. ultralytics/utils/callbacks/wb.py +188 -0
  268. ultralytics/utils/checks.py +1006 -0
  269. ultralytics/utils/cpu.py +85 -0
  270. ultralytics/utils/dist.py +123 -0
  271. ultralytics/utils/downloads.py +529 -0
  272. ultralytics/utils/errors.py +35 -0
  273. ultralytics/utils/events.py +113 -0
  274. ultralytics/utils/export/__init__.py +7 -0
  275. ultralytics/utils/export/engine.py +237 -0
  276. ultralytics/utils/export/imx.py +315 -0
  277. ultralytics/utils/export/tensorflow.py +231 -0
  278. ultralytics/utils/files.py +219 -0
  279. ultralytics/utils/git.py +137 -0
  280. ultralytics/utils/instance.py +484 -0
  281. ultralytics/utils/logger.py +501 -0
  282. ultralytics/utils/loss.py +849 -0
  283. ultralytics/utils/metrics.py +1563 -0
  284. ultralytics/utils/nms.py +337 -0
  285. ultralytics/utils/ops.py +664 -0
  286. ultralytics/utils/patches.py +201 -0
  287. ultralytics/utils/plotting.py +1045 -0
  288. ultralytics/utils/tal.py +403 -0
  289. ultralytics/utils/torch_utils.py +984 -0
  290. ultralytics/utils/tqdm.py +440 -0
  291. ultralytics/utils/triton.py +112 -0
  292. ultralytics/utils/tuner.py +160 -0
  293. ultralytics_opencv_headless-8.3.246.dist-info/METADATA +374 -0
  294. ultralytics_opencv_headless-8.3.246.dist-info/RECORD +298 -0
  295. ultralytics_opencv_headless-8.3.246.dist-info/WHEEL +5 -0
  296. ultralytics_opencv_headless-8.3.246.dist-info/entry_points.txt +3 -0
  297. ultralytics_opencv_headless-8.3.246.dist-info/licenses/LICENSE +661 -0
  298. ultralytics_opencv_headless-8.3.246.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1450 @@
+ # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+ from __future__ import annotations
+
+ import contextlib
+ import importlib.metadata
+ import inspect
+ import json
+ import logging
+ import os
+ import platform
+ import re
+ import socket
+ import sys
+ import threading
+ import time
+ import warnings
+ from functools import lru_cache
+ from pathlib import Path
+ from threading import Lock
+ from types import SimpleNamespace
+ from urllib.parse import unquote
+
+ import cv2
+ import numpy as np
+ import torch
+
+ from ultralytics import __version__
+ from ultralytics.utils.git import GitRepo
+ from ultralytics.utils.patches import imread, imshow, imwrite, torch_save # for patches
+ from ultralytics.utils.tqdm import TQDM # noqa
+
+ # PyTorch Multi-GPU DDP Constants
+ RANK = int(os.getenv("RANK", -1))
+ LOCAL_RANK = int(os.getenv("LOCAL_RANK", -1)) # https://pytorch.org/docs/stable/elastic/run.html
+
+ # Other Constants
+ ARGV = sys.argv or ["", ""] # sometimes sys.argv = []
+ FILE = Path(__file__).resolve()
+ ROOT = FILE.parents[1] # YOLO
+ ASSETS = ROOT / "assets" # default images
+ ASSETS_URL = "https://github.com/ultralytics/assets/releases/download/v0.0.0" # assets GitHub URL
+ DEFAULT_CFG_PATH = ROOT / "cfg/default.yaml"
+ NUM_THREADS = min(8, max(1, os.cpu_count() - 1)) # number of YOLO multiprocessing threads
+ AUTOINSTALL = str(os.getenv("YOLO_AUTOINSTALL", True)).lower() == "true" # global auto-install mode
+ VERBOSE = str(os.getenv("YOLO_VERBOSE", True)).lower() == "true" # global verbose mode
+ LOGGING_NAME = "ultralytics"
+ MACOS, LINUX, WINDOWS = (platform.system() == x for x in ["Darwin", "Linux", "Windows"]) # environment booleans
+ MACOS_VERSION = platform.mac_ver()[0] if MACOS else None
+ NOT_MACOS14 = not (MACOS and MACOS_VERSION.startswith("14."))
+ ARM64 = platform.machine() in {"arm64", "aarch64"} # ARM64 booleans
+ PYTHON_VERSION = platform.python_version()
+ TORCH_VERSION = str(torch.__version__) # Normalize torch.__version__ (PyTorch>1.9 returns TorchVersion objects)
+ TORCHVISION_VERSION = importlib.metadata.version("torchvision") # faster than importing torchvision
+ IS_VSCODE = os.environ.get("TERM_PROGRAM", False) == "vscode"
+ RKNN_CHIPS = frozenset(
+ {
+ "rk3588",
+ "rk3576",
+ "rk3566",
+ "rk3568",
+ "rk3562",
+ "rv1103",
+ "rv1106",
+ "rv1103b",
+ "rv1106b",
+ "rk2118",
+ "rv1126b",
+ }
+ ) # Rockchip processors available for export
+ HELP_MSG = """
+ Examples for running Ultralytics:
+
+ 1. Install the ultralytics package:
+
+ pip install ultralytics
+
+ 2. Use the Python SDK:
+
+ from ultralytics import YOLO
+
+ # Load a model
+ model = YOLO("yolo11n.yaml") # build a new model from scratch
+ model = YOLO("yolo11n.pt") # load a pretrained model (recommended for training)
+
+ # Use the model
+ results = model.train(data="coco8.yaml", epochs=3) # train the model
+ results = model.val() # evaluate model performance on the validation set
+ results = model("https://ultralytics.com/images/bus.jpg") # predict on an image
+ success = model.export(format="onnx") # export the model to ONNX format
+
+ 3. Use the command line interface (CLI):
+
+ Ultralytics 'yolo' CLI commands use the following syntax:
+
+ yolo TASK MODE ARGS
+
+ Where TASK (optional) is one of [detect, segment, classify, pose, obb]
+ MODE (required) is one of [train, val, predict, export, track, benchmark]
+ ARGS (optional) are any number of custom "arg=value" pairs like "imgsz=320" that override defaults.
+ See all ARGS at https://docs.ultralytics.com/usage/cfg or with "yolo cfg"
+
+ - Train a detection model for 10 epochs with an initial learning_rate of 0.01
+ yolo detect train data=coco8.yaml model=yolo11n.pt epochs=10 lr0=0.01
+
+ - Predict a YouTube video using a pretrained segmentation model at image size 320:
+ yolo segment predict model=yolo11n-seg.pt source='https://youtu.be/LNwODJXcvt4' imgsz=320
+
+ - Val a pretrained detection model at batch-size 1 and image size 640:
+ yolo detect val model=yolo11n.pt data=coco8.yaml batch=1 imgsz=640
+
+ - Export a YOLO11n classification model to ONNX format at image size 224 by 128 (no TASK required)
+ yolo export model=yolo11n-cls.pt format=onnx imgsz=224,128
+
+ - Run special commands:
+ yolo help
+ yolo checks
+ yolo version
+ yolo settings
+ yolo copy-cfg
+ yolo cfg
+
+ Docs: https://docs.ultralytics.com
+ Community: https://community.ultralytics.com
+ GitHub: https://github.com/ultralytics/ultralytics
+ """
+
+ # Settings and Environment Variables
+ torch.set_printoptions(linewidth=320, precision=4, profile="default")
+ np.set_printoptions(linewidth=320, formatter=dict(float_kind="{:11.5g}".format)) # format short g, %precision=5
+ cv2.setNumThreads(0) # prevent OpenCV from multithreading (incompatible with PyTorch DataLoader)
+ os.environ["NUMEXPR_MAX_THREADS"] = str(NUM_THREADS) # NumExpr max threads
+ os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3" # suppress verbose TF compiler warnings in Colab
+ os.environ["TORCH_CPP_LOG_LEVEL"] = "ERROR" # suppress "NNPACK.cpp could not initialize NNPACK" warnings
+ os.environ["KINETO_LOG_LEVEL"] = "5" # suppress verbose PyTorch profiler output when computing FLOPs
+
+ # Centralized warning suppression
+ warnings.filterwarnings("ignore", message="torch.distributed.reduce_op is deprecated") # PyTorch deprecation
+ warnings.filterwarnings("ignore", message="The figure layout has changed to tight") # matplotlib>=3.7.2
+ warnings.filterwarnings("ignore", category=FutureWarning, module="timm") # mobileclip timm.layers deprecation
+ warnings.filterwarnings("ignore", category=torch.jit.TracerWarning) # ONNX/TorchScript export tracer warnings
+ warnings.filterwarnings("ignore", category=UserWarning, message=".*prim::Constant.*") # ONNX shape warning
+ warnings.filterwarnings("ignore", category=DeprecationWarning, module="coremltools") # CoreML np.bool deprecation
+
+ # Precompiled type tuples for faster isinstance() checks
+ FLOAT_OR_INT = (float, int)
+ STR_OR_PATH = (str, Path)
+
+
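A brief sketch of how the import-time flags above are typically driven from the environment; the variable names come from the constants in this module, while the values and the print line are illustrative:

import os

os.environ["YOLO_VERBOSE"] = "false"      # read into VERBOSE at import time
os.environ["YOLO_AUTOINSTALL"] = "false"  # read into AUTOINSTALL at import time

from ultralytics.utils import NUM_THREADS, RANK, VERBOSE  # import after setting the variables

print(RANK, NUM_THREADS, VERBOSE)  # e.g. -1 7 False outside DDP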
+ class DataExportMixin:
151
+ """Mixin class for exporting validation metrics or prediction results in various formats.
152
+
153
+ This class provides utilities to export performance metrics (e.g., mAP, precision, recall) or prediction results
154
+ from classification, object detection, segmentation, or pose estimation tasks into various formats: Polars
155
+ DataFrame, CSV, and JSON.
156
+
157
+ Methods:
158
+ to_df: Convert summary to a Polars DataFrame.
159
+ to_csv: Export results as a CSV string.
160
+ to_json: Export results as a JSON string.
161
+ tojson: Deprecated alias for `to_json()`.
162
+
163
+ Examples:
164
+ >>> model = YOLO("yolo11n.pt")
165
+ >>> results = model("image.jpg")
166
+ >>> df = results.to_df()
167
+ >>> print(df)
168
+ >>> csv_data = results.to_csv()
169
+ """
170
+
171
+ def to_df(self, normalize=False, decimals=5):
172
+ """Create a Polars DataFrame from the prediction results summary or validation metrics.
173
+
174
+ Args:
175
+ normalize (bool, optional): Normalize numerical values for easier comparison.
176
+ decimals (int, optional): Decimal places to round floats.
177
+
178
+ Returns:
179
+ (polars.DataFrame): Polars DataFrame containing the summary data.
180
+ """
181
+ import polars as pl # scope for faster 'import ultralytics'
182
+
183
+ return pl.DataFrame(self.summary(normalize=normalize, decimals=decimals))
184
+
185
+ def to_csv(self, normalize=False, decimals=5):
186
+ """Export results or metrics to CSV string format.
187
+
188
+ Args:
189
+ normalize (bool, optional): Normalize numeric values.
190
+ decimals (int, optional): Decimal precision.
191
+
192
+ Returns:
193
+ (str): CSV content as string.
194
+ """
195
+ import polars as pl
196
+
197
+ df = self.to_df(normalize=normalize, decimals=decimals)
198
+
199
+ try:
200
+ return df.write_csv()
201
+ except Exception:
202
+ # Minimal string conversion for any remaining complex types
203
+ def _to_str_simple(v):
204
+ if v is None:
205
+ return ""
206
+ elif isinstance(v, (dict, list, tuple, set)):
207
+ return repr(v)
208
+ else:
209
+ return str(v)
210
+
211
+ df_str = df.select(
212
+ [pl.col(c).map_elements(_to_str_simple, return_dtype=pl.String).alias(c) for c in df.columns]
213
+ )
214
+ return df_str.write_csv()
215
+
216
+ def to_json(self, normalize=False, decimals=5):
217
+ """Export results to JSON format.
218
+
219
+ Args:
220
+ normalize (bool, optional): Normalize numeric values.
221
+ decimals (int, optional): Decimal precision.
222
+
223
+ Returns:
224
+ (str): JSON-formatted string of the results.
225
+ """
226
+ return self.to_df(normalize=normalize, decimals=decimals).write_json()
+
+
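A short usage sketch of the export helpers this mixin provides on prediction results; it assumes a local "bus.jpg" and the optional polars dependency, and the paths are placeholders:

from ultralytics import YOLO

model = YOLO("yolo11n.pt")
result = model("bus.jpg")[0]  # Results objects expose the DataExportMixin methods

df = result.to_df()           # polars.DataFrame built from result.summary()
csv_text = result.to_csv()    # same summary as a CSV string
json_text = result.to_json()  # same summary as a JSON string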
+ class SimpleClass:
230
+ """A simple base class for creating objects with string representations of their attributes.
231
+
232
+ This class provides a foundation for creating objects that can be easily printed or represented as strings, showing
233
+ all their non-callable attributes. It's useful for debugging and introspection of object states.
234
+
235
+ Methods:
236
+ __str__: Return a human-readable string representation of the object.
237
+ __repr__: Return a machine-readable string representation of the object.
238
+ __getattr__: Provide a custom attribute access error message with helpful information.
239
+
240
+ Examples:
241
+ >>> class MyClass(SimpleClass):
242
+ ... def __init__(self):
243
+ ... self.x = 10
244
+ ... self.y = "hello"
245
+ >>> obj = MyClass()
246
+ >>> print(obj)
247
+ __main__.MyClass object with attributes:
248
+
249
+ x: 10
250
+ y: 'hello'
251
+
252
+ Notes:
253
+ - This class is designed to be subclassed. It provides a convenient way to inspect object attributes.
254
+ - The string representation includes the module and class name of the object.
255
+ - Callable attributes and attributes starting with an underscore are excluded from the string representation.
256
+ """
257
+
258
+ def __str__(self):
259
+ """Return a human-readable string representation of the object."""
260
+ attr = []
261
+ for a in dir(self):
262
+ v = getattr(self, a)
263
+ if not callable(v) and not a.startswith("_"):
264
+ if isinstance(v, SimpleClass):
265
+ # Display only the module and class name for subclasses
266
+ s = f"{a}: {v.__module__}.{v.__class__.__name__} object"
267
+ else:
268
+ s = f"{a}: {v!r}"
269
+ attr.append(s)
270
+ return f"{self.__module__}.{self.__class__.__name__} object with attributes:\n\n" + "\n".join(attr)
271
+
272
+ def __repr__(self):
273
+ """Return a machine-readable string representation of the object."""
274
+ return self.__str__()
275
+
276
+ def __getattr__(self, attr):
277
+ """Provide a custom attribute access error message with helpful information."""
278
+ name = self.__class__.__name__
279
+ raise AttributeError(f"'{name}' object has no attribute '{attr}'. See valid attributes below.\n{self.__doc__}")
280
+
281
+
282
+ class IterableSimpleNamespace(SimpleNamespace):
283
+ """An iterable SimpleNamespace class that provides enhanced functionality for attribute access and iteration.
284
+
285
+ This class extends the SimpleNamespace class with additional methods for iteration, string representation, and
286
+ attribute access. It is designed to be used as a convenient container for storing and accessing configuration
287
+ parameters.
288
+
289
+ Methods:
290
+ __iter__: Return an iterator of key-value pairs from the namespace's attributes.
291
+ __str__: Return a human-readable string representation of the object.
292
+ __getattr__: Provide a custom attribute access error message with helpful information.
293
+ get: Retrieve the value of a specified key, or a default value if the key doesn't exist.
294
+
295
+ Examples:
296
+ >>> cfg = IterableSimpleNamespace(a=1, b=2, c=3)
297
+ >>> for k, v in cfg:
298
+ ... print(f"{k}: {v}")
299
+ a: 1
300
+ b: 2
301
+ c: 3
302
+ >>> print(cfg)
303
+ a=1
304
+ b=2
305
+ c=3
306
+ >>> cfg.get("b")
307
+ 2
308
+ >>> cfg.get("d", "default")
309
+ 'default'
310
+
311
+ Notes:
312
+ This class is particularly useful for storing configuration parameters in a more accessible
313
+ and iterable format compared to a standard dictionary.
314
+ """
315
+
316
+ def __iter__(self):
317
+ """Return an iterator of key-value pairs from the namespace's attributes."""
318
+ return iter(vars(self).items())
319
+
320
+ def __str__(self):
321
+ """Return a human-readable string representation of the object."""
322
+ return "\n".join(f"{k}={v}" for k, v in vars(self).items())
323
+
324
+ def __getattr__(self, attr):
325
+ """Provide a custom attribute access error message with helpful information."""
326
+ name = self.__class__.__name__
327
+ raise AttributeError(
328
+ f"""
329
+ '{name}' object has no attribute '{attr}'. This may be caused by a modified or out of date ultralytics
330
+ 'default.yaml' file.\nPlease update your code with 'pip install -U ultralytics' and if necessary replace
331
+ {DEFAULT_CFG_PATH} with the latest version from
332
+ https://github.com/ultralytics/ultralytics/blob/main/ultralytics/cfg/default.yaml
333
+ """
334
+ )
335
+
336
+ def get(self, key, default=None):
337
+ """Return the value of the specified key if it exists; otherwise, return the default value."""
338
+ return getattr(self, key, default)
+
+
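A minimal sketch of the iteration and get() behavior described above, using the DEFAULT_CFG namespace defined further down in this module; the keys shown are illustrative:

from ultralytics.utils import DEFAULT_CFG

cfg_dict = dict(DEFAULT_CFG)  # __iter__ yields (key, value) pairs, so dict() works directly
print(DEFAULT_CFG.get("imgsz"))                    # value loaded from default.yaml
print(DEFAULT_CFG.get("missing_key", "fallback"))  # default returned for absent keys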
+ def plt_settings(rcparams=None, backend="Agg"):
342
+ """Decorator to temporarily set rc parameters and the backend for a plotting function.
343
+
344
+ Args:
345
+ rcparams (dict, optional): Dictionary of rc parameters to set.
346
+ backend (str, optional): Name of the backend to use.
347
+
348
+ Returns:
349
+ (Callable): Decorated function with temporarily set rc parameters and backend.
350
+
351
+ Examples:
352
+ >>> @plt_settings({"font.size": 12})
353
+ >>> def plot_function():
354
+ ... plt.figure()
355
+ ... plt.plot([1, 2, 3])
356
+ ... plt.show()
357
+
358
+ >>> with plt_settings({"font.size": 12}):
359
+ ... plt.figure()
360
+ ... plt.plot([1, 2, 3])
361
+ ... plt.show()
362
+ """
363
+ if rcparams is None:
364
+ rcparams = {"font.size": 11}
365
+
366
+ def decorator(func):
367
+ """Decorator to apply temporary rc parameters and backend to a function."""
368
+
369
+ def wrapper(*args, **kwargs):
370
+ """Set rc parameters and backend, call the original function, and restore the settings."""
371
+ import matplotlib.pyplot as plt # scope for faster 'import ultralytics'
372
+
373
+ original_backend = plt.get_backend()
374
+ switch = backend.lower() != original_backend.lower()
375
+ if switch:
376
+ plt.close("all") # auto-close()ing of figures upon backend switching is deprecated since 3.8
377
+ plt.switch_backend(backend)
378
+
379
+ # Plot with backend and always revert to original backend
380
+ try:
381
+ with plt.rc_context(rcparams):
382
+ result = func(*args, **kwargs)
383
+ finally:
384
+ if switch:
385
+ plt.close("all")
386
+ plt.switch_backend(original_backend)
387
+ return result
388
+
389
+ return wrapper
390
+
391
+ return decorator
+
+
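A small sketch of the decorator in use: the wrapped call runs under the requested backend and rcParams, and the previous backend is restored afterwards; the function name and output path are placeholders:

import matplotlib.pyplot as plt

from ultralytics.utils import plt_settings

@plt_settings({"font.size": 8}, backend="Agg")
def save_curve(path="curve.png"):
    plt.figure()
    plt.plot([1, 2, 3])
    plt.savefig(path)
    plt.close()

save_curve()  # renders headlessly under Agg, then switches back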
+ def set_logging(name="LOGGING_NAME", verbose=True):
395
+ """Set up logging with UTF-8 encoding and configurable verbosity.
396
+
397
+ This function configures logging for the Ultralytics library, setting the appropriate logging level and formatter
398
+ based on the verbosity flag and the current process rank. It handles special cases for Windows environments where
399
+ UTF-8 encoding might not be the default.
400
+
401
+ Args:
402
+ name (str): Name of the logger.
403
+ verbose (bool): Flag to set logging level to INFO if True, ERROR otherwise.
404
+
405
+ Returns:
406
+ (logging.Logger): Configured logger object.
407
+
408
+ Examples:
409
+ >>> set_logging(name="ultralytics", verbose=True)
410
+ >>> logger = logging.getLogger("ultralytics")
411
+ >>> logger.info("This is an info message")
412
+
413
+ Notes:
414
+ - On Windows, this function attempts to reconfigure stdout to use UTF-8 encoding if possible.
415
+ - If reconfiguration is not possible, it falls back to a custom formatter that handles non-UTF-8 environments.
416
+ - The function sets up a StreamHandler with the appropriate formatter and level.
417
+ - The logger's propagate flag is set to False to prevent duplicate logging in parent loggers.
418
+ """
419
+ level = logging.INFO if verbose and RANK in {-1, 0} else logging.ERROR # rank in world for Multi-GPU trainings
420
+
421
+ class PrefixFormatter(logging.Formatter):
422
+ def format(self, record):
423
+ """Format log records with prefixes based on level."""
424
+ # Apply prefixes based on log level
425
+ if record.levelno == logging.WARNING:
426
+ prefix = "WARNING" if WINDOWS else "WARNING ⚠️"
427
+ record.msg = f"{prefix} {record.msg}"
428
+ elif record.levelno == logging.ERROR:
429
+ prefix = "ERROR" if WINDOWS else "ERROR ❌"
430
+ record.msg = f"{prefix} {record.msg}"
431
+
432
+ # Handle emojis in message based on platform
433
+ formatted_message = super().format(record)
434
+ return emojis(formatted_message)
435
+
436
+ formatter = PrefixFormatter("%(message)s")
437
+
438
+ # Handle Windows UTF-8 encoding issues
439
+ if WINDOWS and hasattr(sys.stdout, "encoding") and sys.stdout.encoding != "utf-8":
440
+ with contextlib.suppress(Exception):
441
+ # Attempt to reconfigure stdout to use UTF-8 encoding if possible
442
+ if hasattr(sys.stdout, "reconfigure"):
443
+ sys.stdout.reconfigure(encoding="utf-8")
444
+ # For environments where reconfigure is not available, wrap stdout in a TextIOWrapper
445
+ elif hasattr(sys.stdout, "buffer"):
446
+ import io
447
+
448
+ sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8")
449
+
450
+ # Create and configure the StreamHandler with the appropriate formatter and level
451
+ stream_handler = logging.StreamHandler(sys.stdout)
452
+ stream_handler.setFormatter(formatter)
453
+ stream_handler.setLevel(level)
454
+
455
+ # Set up the logger
456
+ logger = logging.getLogger(name)
457
+ logger.setLevel(level)
458
+ logger.addHandler(stream_handler)
459
+ logger.propagate = False
460
+ return logger
461
+
462
+
463
+ # Set logger
464
+ LOGGER = set_logging(LOGGING_NAME, verbose=VERBOSE) # define globally (used in train.py, val.py, predict.py, etc.)
465
+ logging.getLogger("sentry_sdk").setLevel(logging.CRITICAL + 1)
466
+
467
+
468
+ def emojis(string=""):
469
+ """Return platform-dependent emoji-safe version of string."""
470
+ return string.encode().decode("ascii", "ignore") if WINDOWS else string
471
+
472
+
473
+ class ThreadingLocked:
474
+ """A decorator class for ensuring thread-safe execution of a function or method.
475
+
476
+ This class can be used as a decorator to make sure that if the decorated function is called from multiple threads,
477
+ only one thread at a time will be able to execute the function.
478
+
479
+ Attributes:
480
+ lock (threading.Lock): A lock object used to manage access to the decorated function.
481
+
482
+ Examples:
483
+ >>> from ultralytics.utils import ThreadingLocked
484
+ >>> @ThreadingLocked()
485
+ >>> def my_function():
486
+ ... # Your code here
487
+ """
488
+
489
+ def __init__(self):
490
+ """Initialize the decorator class with a threading lock."""
491
+ self.lock = threading.Lock()
492
+
493
+ def __call__(self, f):
494
+ """Run thread-safe execution of function or method."""
495
+ from functools import wraps
496
+
497
+ @wraps(f)
498
+ def decorated(*args, **kwargs):
499
+ """Apply thread-safety to the decorated function or method."""
500
+ with self.lock:
501
+ return f(*args, **kwargs)
502
+
503
+ return decorated
+
+
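A compact sketch of the lock in action, serializing calls made from several threads; the counter logic is purely illustrative:

import threading

from ultralytics.utils import ThreadingLocked

counter = {"n": 0}

@ThreadingLocked()
def bump():
    counter["n"] += 1  # only one thread at a time runs this body

threads = [threading.Thread(target=bump) for _ in range(8)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(counter["n"])  # 8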
+ class YAML:
507
+ """YAML utility class for efficient file operations with automatic C-implementation detection.
508
+
509
+ This class provides optimized YAML loading and saving operations using PyYAML's fastest available implementation
510
+ (C-based when possible). It implements a singleton pattern with lazy initialization, allowing direct class method
511
+ usage without explicit instantiation. The class handles file path creation, validation, and character encoding
512
+ issues automatically.
513
+
514
+ The implementation prioritizes performance through:
515
+ - Automatic C-based loader/dumper selection when available
516
+ - Singleton pattern to reuse the same instance
517
+ - Lazy initialization to defer import costs until needed
518
+ - Fallback mechanisms for handling problematic YAML content
519
+
520
+ Attributes:
521
+ _instance: Internal singleton instance storage.
522
+ yaml: Reference to the PyYAML module.
523
+ SafeLoader: Best available YAML loader (CSafeLoader if available).
524
+ SafeDumper: Best available YAML dumper (CSafeDumper if available).
525
+
526
+ Examples:
527
+ >>> data = YAML.load("config.yaml")
528
+ >>> data["new_value"] = 123
529
+ >>> YAML.save("updated_config.yaml", data)
530
+ >>> YAML.print(data)
531
+ """
532
+
533
+ _instance = None
534
+
535
+ @classmethod
536
+ def _get_instance(cls):
537
+ """Initialize singleton instance on first use."""
538
+ if cls._instance is None:
539
+ cls._instance = cls()
540
+ return cls._instance
541
+
542
+ def __init__(self):
543
+ """Initialize with optimal YAML implementation (C-based when available)."""
544
+ import yaml
545
+
546
+ self.yaml = yaml
547
+ # Use C-based implementation if available for better performance
548
+ try:
549
+ self.SafeLoader = yaml.CSafeLoader
550
+ self.SafeDumper = yaml.CSafeDumper
551
+ except (AttributeError, ImportError):
552
+ self.SafeLoader = yaml.SafeLoader
553
+ self.SafeDumper = yaml.SafeDumper
554
+
555
+ @classmethod
556
+ def save(cls, file="data.yaml", data=None, header=""):
557
+ """Save Python object as YAML file.
558
+
559
+ Args:
560
+ file (str | Path): Path to save YAML file.
561
+ data (dict | None): Dict or compatible object to save.
562
+ header (str): Optional string to add at file beginning.
563
+ """
564
+ instance = cls._get_instance()
565
+ if data is None:
566
+ data = {}
567
+
568
+ # Create parent directories if needed
569
+ file = Path(file)
570
+ file.parent.mkdir(parents=True, exist_ok=True)
571
+
572
+ # Convert non-serializable objects to strings
573
+ valid_types = int, float, str, bool, list, tuple, dict, type(None)
574
+ for k, v in data.items():
575
+ if not isinstance(v, valid_types):
576
+ data[k] = str(v)
577
+
578
+ # Write YAML file
579
+ with open(file, "w", errors="ignore", encoding="utf-8") as f:
580
+ if header:
581
+ f.write(header)
582
+ instance.yaml.dump(data, f, sort_keys=False, allow_unicode=True, Dumper=instance.SafeDumper)
583
+
584
+ @classmethod
585
+ def load(cls, file="data.yaml", append_filename=False):
586
+ """Load YAML file to Python object with robust error handling.
587
+
588
+ Args:
589
+ file (str | Path): Path to YAML file.
590
+ append_filename (bool): Whether to add filename to returned dict.
591
+
592
+ Returns:
593
+ (dict): Loaded YAML content.
594
+ """
595
+ instance = cls._get_instance()
596
+ assert str(file).endswith((".yaml", ".yml")), f"Not a YAML file: {file}"
597
+
598
+ # Read file content
599
+ with open(file, errors="ignore", encoding="utf-8") as f:
600
+ s = f.read()
601
+
602
+ # Try loading YAML with fallback for problematic characters
603
+ try:
604
+ data = instance.yaml.load(s, Loader=instance.SafeLoader) or {}
605
+ except Exception:
606
+ # Remove problematic characters and retry
607
+ s = re.sub(r"[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD\U00010000-\U0010ffff]+", "", s)
608
+ data = instance.yaml.load(s, Loader=instance.SafeLoader) or {}
609
+
610
+ # Check for accidental user-error None strings (should be 'null' in YAML)
611
+ if "None" in data.values():
612
+ data = {k: None if v == "None" else v for k, v in data.items()}
613
+
614
+ if append_filename:
615
+ data["yaml_file"] = str(file)
616
+ return data
617
+
618
+ @classmethod
619
+ def print(cls, yaml_file):
620
+ """Pretty print YAML file or object to console.
621
+
622
+ Args:
623
+ yaml_file (str | Path | dict): Path to YAML file or dict to print.
624
+ """
625
+ instance = cls._get_instance()
626
+
627
+ # Load file if path provided
628
+ yaml_dict = cls.load(yaml_file) if isinstance(yaml_file, (str, Path)) else yaml_file
629
+
630
+ # Use -1 for unlimited width in C implementation
631
+ dump = instance.yaml.dump(yaml_dict, sort_keys=False, allow_unicode=True, width=-1, Dumper=instance.SafeDumper)
632
+
633
+ LOGGER.info(f"Printing '{colorstr('bold', 'black', yaml_file)}'\n\n{dump}")
634
+
635
+
636
+ # Default configuration
637
+ DEFAULT_CFG_DICT = YAML.load(DEFAULT_CFG_PATH)
638
+ DEFAULT_CFG_KEYS = DEFAULT_CFG_DICT.keys()
639
+ DEFAULT_CFG = IterableSimpleNamespace(**DEFAULT_CFG_DICT)
+
+
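A quick round-trip sketch of the classmethods above; the file name and keys are placeholders:

from ultralytics.utils import YAML

YAML.save("example.yaml", {"names": ["person", "car"], "imgsz": 640}, header="# demo config\n")
data = YAML.load("example.yaml")
print(data["imgsz"])  # 640
YAML.print(data)      # pretty-printed through LOGGER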
+ def read_device_model() -> str:
643
+ """Read the device model information from the system and cache it for quick access.
644
+
645
+ Returns:
646
+ (str): Kernel release information.
647
+ """
648
+ return platform.release().lower()
649
+
650
+
651
+ def is_ubuntu() -> bool:
652
+ """Check if the OS is Ubuntu.
653
+
654
+ Returns:
655
+ (bool): True if OS is Ubuntu, False otherwise.
656
+ """
657
+ try:
658
+ with open("/etc/os-release") as f:
659
+ return "ID=ubuntu" in f.read()
660
+ except FileNotFoundError:
661
+ return False
662
+
663
+
664
+ def is_debian(codenames: list[str] | None | str = None) -> list[bool] | bool:
665
+ """Check if the OS is Debian.
666
+
667
+ Args:
668
+ codenames (list[str] | None | str): Specific Debian codename to check for (e.g., 'buster', 'bullseye'). If None,
669
+ only checks for Debian.
670
+
671
+ Returns:
672
+ (list[bool] | bool): List of booleans indicating if OS matches each Debian codename, or a single boolean if no
673
+ codenames provided.
674
+ """
675
+ try:
676
+ with open("/etc/os-release") as f:
677
+ content = f.read()
678
+ if codenames is None:
679
+ return "ID=debian" in content
680
+ if isinstance(codenames, str):
681
+ codenames = [codenames]
682
+ return [
683
+ f"VERSION_CODENAME={codename}" in content if codename else "ID=debian" in content
684
+ for codename in codenames
685
+ ]
686
+ except FileNotFoundError:
687
+ return [False] * len(codenames) if codenames else False
688
+
689
+
690
+ def is_colab():
691
+ """Check if the current script is running inside a Google Colab notebook.
692
+
693
+ Returns:
694
+ (bool): True if running inside a Colab notebook, False otherwise.
695
+ """
696
+ return "COLAB_RELEASE_TAG" in os.environ or "COLAB_BACKEND_VERSION" in os.environ
697
+
698
+
699
+ def is_kaggle():
700
+ """Check if the current script is running inside a Kaggle kernel.
701
+
702
+ Returns:
703
+ (bool): True if running inside a Kaggle kernel, False otherwise.
704
+ """
705
+ return os.environ.get("PWD") == "/kaggle/working" and os.environ.get("KAGGLE_URL_BASE") == "https://www.kaggle.com"
706
+
707
+
708
+ def is_jupyter():
709
+ """Check if the current script is running inside a Jupyter Notebook.
710
+
711
+ Returns:
712
+ (bool): True if running inside a Jupyter Notebook, False otherwise.
713
+
714
+ Notes:
715
+ - Only works on Colab and Kaggle, other environments like Jupyterlab and Paperspace are not reliably detectable.
716
+ - "get_ipython" in globals() method suffers false positives when IPython package installed manually.
717
+ """
718
+ return IS_COLAB or IS_KAGGLE
719
+
720
+
721
+ def is_runpod():
722
+ """Check if the current script is running inside a RunPod container.
723
+
724
+ Returns:
725
+ (bool): True if running in RunPod, False otherwise.
726
+ """
727
+ return "RUNPOD_POD_ID" in os.environ
728
+
729
+
730
+ def is_docker() -> bool:
731
+ """Determine if the script is running inside a Docker container.
732
+
733
+ Returns:
734
+ (bool): True if the script is running inside a Docker container, False otherwise.
735
+ """
736
+ try:
737
+ return os.path.exists("/.dockerenv")
738
+ except Exception:
739
+ return False
740
+
741
+
742
+ def is_raspberrypi() -> bool:
743
+ """Determine if the Python environment is running on a Raspberry Pi.
744
+
745
+ Returns:
746
+ (bool): True if running on a Raspberry Pi, False otherwise.
747
+ """
748
+ return "rpi" in DEVICE_MODEL
749
+
750
+
751
+ @lru_cache(maxsize=3)
752
+ def is_jetson(jetpack=None) -> bool:
753
+ """Determine if the Python environment is running on an NVIDIA Jetson device.
754
+
755
+ Args:
756
+ jetpack (int | None): If specified, check for specific JetPack version (4, 5, 6).
757
+
758
+ Returns:
759
+ (bool): True if running on an NVIDIA Jetson device, False otherwise.
760
+ """
761
+ jetson = "tegra" in DEVICE_MODEL
762
+ if jetson and jetpack:
763
+ try:
764
+ content = open("/etc/nv_tegra_release").read()
765
+ version_map = {4: "R32", 5: "R35", 6: "R36"} # JetPack to L4T major version mapping
766
+ return jetpack in version_map and version_map[jetpack] in content
767
+ except Exception:
768
+ return False
769
+ return jetson
770
+
771
+
772
+ def is_online() -> bool:
773
+ """Fast online check using DNS (v4/v6) resolution (Cloudflare + Google).
774
+
775
+ Returns:
776
+ (bool): True if connection is successful, False otherwise.
777
+ """
778
+ if str(os.getenv("YOLO_OFFLINE", "")).lower() == "true":
779
+ return False
780
+
781
+ for host in ("one.one.one.one", "dns.google"):
782
+ try:
783
+ socket.getaddrinfo(host, 0, socket.AF_UNSPEC, 0, 0, socket.AI_ADDRCONFIG)
784
+ return True
785
+ except OSError:
786
+ continue
787
+ return False
788
+
789
+
790
+ def is_pip_package(filepath: str = __name__) -> bool:
791
+ """Determine if the file at the given filepath is part of a pip package.
792
+
793
+ Args:
794
+ filepath (str): The filepath to check.
795
+
796
+ Returns:
797
+ (bool): True if the file is part of a pip package, False otherwise.
798
+ """
799
+ import importlib.util
800
+
801
+ # Get the spec for the module
802
+ spec = importlib.util.find_spec(filepath)
803
+
804
+ # Return whether the spec is not None and the origin is not None (indicating it is a package)
805
+ return spec is not None and spec.origin is not None
806
+
807
+
808
+ def is_dir_writeable(dir_path: str | Path) -> bool:
809
+ """Check if a directory is writable.
810
+
811
+ Args:
812
+ dir_path (str | Path): The path to the directory.
813
+
814
+ Returns:
815
+ (bool): True if the directory is writable, False otherwise.
816
+ """
817
+ return os.access(str(dir_path), os.W_OK)
818
+
819
+
820
+ def is_pytest_running():
821
+ """Determine whether pytest is currently running or not.
822
+
823
+ Returns:
824
+ (bool): True if pytest is running, False otherwise.
825
+ """
826
+ return ("PYTEST_CURRENT_TEST" in os.environ) or ("pytest" in sys.modules) or ("pytest" in Path(ARGV[0]).stem)
827
+
828
+
829
+ def is_github_action_running() -> bool:
830
+ """Determine if the current environment is a GitHub Actions runner.
831
+
832
+ Returns:
833
+ (bool): True if the current environment is a GitHub Actions runner, False otherwise.
834
+ """
835
+ return "GITHUB_ACTIONS" in os.environ and "GITHUB_WORKFLOW" in os.environ and "RUNNER_OS" in os.environ
836
+
837
+
838
+ def get_default_args(func):
839
+ """Return a dictionary of default arguments for a function.
840
+
841
+ Args:
842
+ func (callable): The function to inspect.
843
+
844
+ Returns:
845
+ (dict): A dictionary where each key is a parameter name, and each value is the default value of that parameter.
846
+ """
847
+ signature = inspect.signature(func)
848
+ return {k: v.default for k, v in signature.parameters.items() if v.default is not inspect.Parameter.empty}
849
+
850
+
851
+ def get_ubuntu_version():
852
+ """Retrieve the Ubuntu version if the OS is Ubuntu.
853
+
854
+ Returns:
855
+ (str): Ubuntu version or None if not an Ubuntu OS.
856
+ """
857
+ if is_ubuntu():
858
+ try:
859
+ with open("/etc/os-release") as f:
860
+ return re.search(r'VERSION_ID="(\d+\.\d+)"', f.read())[1]
861
+ except (FileNotFoundError, AttributeError):
862
+ return None
863
+
864
+
865
+ def get_user_config_dir(sub_dir="Ultralytics"):
866
+ """Return a writable config dir, preferring YOLO_CONFIG_DIR and being OS-aware.
867
+
868
+ Args:
869
+ sub_dir (str): The name of the subdirectory to create.
870
+
871
+ Returns:
872
+ (Path): The path to the user config directory.
873
+ """
874
+ if env_dir := os.getenv("YOLO_CONFIG_DIR"):
875
+ p = Path(env_dir).expanduser() / sub_dir
876
+ elif LINUX:
877
+ p = Path(os.getenv("XDG_CONFIG_HOME", Path.home() / ".config")) / sub_dir
878
+ elif WINDOWS:
879
+ p = Path.home() / "AppData" / "Roaming" / sub_dir
880
+ elif MACOS:
881
+ p = Path.home() / "Library" / "Application Support" / sub_dir
882
+ else:
883
+ raise ValueError(f"Unsupported operating system: {platform.system()}")
884
+
885
+ if p.exists(): # already created → trust it
886
+ return p
887
+ if is_dir_writeable(p.parent): # create if possible
888
+ p.mkdir(parents=True, exist_ok=True)
889
+ return p
890
+
891
+ # Fallbacks for Docker, GCP/AWS functions where only /tmp is writable
892
+ for alt in [Path("/tmp") / sub_dir, Path.cwd() / sub_dir]:
893
+ if alt.exists():
894
+ return alt
895
+ if is_dir_writeable(alt.parent):
896
+ alt.mkdir(parents=True, exist_ok=True)
897
+ LOGGER.warning(
898
+ f"user config directory '{p}' is not writable, using '{alt}'. Set YOLO_CONFIG_DIR to override."
899
+ )
900
+ return alt
901
+
902
+ # Last fallback → CWD
903
+ p = Path.cwd() / sub_dir
904
+ p.mkdir(parents=True, exist_ok=True)
905
+ return p
906
+
907
+
908
+ # Define constants (required below)
909
+ DEVICE_MODEL = read_device_model() # is_jetson() and is_raspberrypi() depend on this constant
910
+ ONLINE = is_online()
911
+ IS_COLAB = is_colab()
912
+ IS_KAGGLE = is_kaggle()
913
+ IS_DOCKER = is_docker()
914
+ IS_JETSON = is_jetson()
915
+ IS_JUPYTER = is_jupyter()
916
+ IS_PIP_PACKAGE = is_pip_package()
917
+ IS_RASPBERRYPI = is_raspberrypi()
918
+ IS_DEBIAN, IS_DEBIAN_BOOKWORM, IS_DEBIAN_TRIXIE = is_debian([None, "bookworm", "trixie"])
919
+ IS_UBUNTU = is_ubuntu()
920
+ GIT = GitRepo()
921
+ USER_CONFIG_DIR = get_user_config_dir() # Ultralytics settings dir
922
+ SETTINGS_FILE = USER_CONFIG_DIR / "settings.json"
+
+
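A short sketch of overriding the config-directory resolution above via YOLO_CONFIG_DIR; the path is a placeholder and must be set before ultralytics is imported:

import os

os.environ["YOLO_CONFIG_DIR"] = "/tmp/yolo-config"

from ultralytics.utils import SETTINGS_FILE, USER_CONFIG_DIR

print(USER_CONFIG_DIR)  # /tmp/yolo-config/Ultralytics
print(SETTINGS_FILE)    # /tmp/yolo-config/Ultralytics/settings.json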
+ def colorstr(*input):
926
+ r"""Color a string based on the provided color and style arguments using ANSI escape codes.
927
+
928
+ This function can be called in two ways:
929
+ - colorstr('color', 'style', 'your string')
930
+ - colorstr('your string')
931
+
932
+ In the second form, 'blue' and 'bold' will be applied by default.
933
+
934
+ Args:
935
+ *input (str | Path): A sequence of strings where the first n-1 strings are color and style arguments, and the
936
+ last string is the one to be colored.
937
+
938
+ Returns:
939
+ (str): The input string wrapped with ANSI escape codes for the specified color and style.
940
+
941
+ Examples:
942
+ >>> colorstr("blue", "bold", "hello world")
943
+ >>> "\033[34m\033[1mhello world\033[0m"
944
+
945
+ Notes:
946
+ Supported Colors and Styles:
947
+ - Basic Colors: 'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'
948
+ - Bright Colors: 'bright_black', 'bright_red', 'bright_green', 'bright_yellow',
949
+ 'bright_blue', 'bright_magenta', 'bright_cyan', 'bright_white'
950
+ - Misc: 'end', 'bold', 'underline'
951
+
952
+ References:
953
+ https://en.wikipedia.org/wiki/ANSI_escape_code
954
+ """
955
+ *args, string = input if len(input) > 1 else ("blue", "bold", input[0]) # color arguments, string
956
+ colors = {
957
+ "black": "\033[30m", # basic colors
958
+ "red": "\033[31m",
959
+ "green": "\033[32m",
960
+ "yellow": "\033[33m",
961
+ "blue": "\033[34m",
962
+ "magenta": "\033[35m",
963
+ "cyan": "\033[36m",
964
+ "white": "\033[37m",
965
+ "bright_black": "\033[90m", # bright colors
966
+ "bright_red": "\033[91m",
967
+ "bright_green": "\033[92m",
968
+ "bright_yellow": "\033[93m",
969
+ "bright_blue": "\033[94m",
970
+ "bright_magenta": "\033[95m",
971
+ "bright_cyan": "\033[96m",
972
+ "bright_white": "\033[97m",
973
+ "end": "\033[0m", # misc
974
+ "bold": "\033[1m",
975
+ "underline": "\033[4m",
976
+ }
977
+ return "".join(colors[x] for x in args) + f"{string}" + colors["end"]
978
+
979
+
980
+ def remove_colorstr(input_string):
981
+ """Remove ANSI escape codes from a string, effectively un-coloring it.
982
+
983
+ Args:
984
+ input_string (str): The string to remove color and style from.
985
+
986
+ Returns:
987
+ (str): A new string with all ANSI escape codes removed.
988
+
989
+ Examples:
990
+ >>> remove_colorstr(colorstr("blue", "bold", "hello world"))
991
+ >>> "hello world"
992
+ """
993
+ ansi_escape = re.compile(r"\x1B\[[0-9;]*[A-Za-z]")
994
+ return ansi_escape.sub("", input_string)
+
+
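A tiny sketch pairing the two helpers above; the rendered colors assume an ANSI-capable terminal:

from ultralytics.utils import colorstr, remove_colorstr

s = colorstr("blue", "bold", "hello world")  # "\033[34m\033[1mhello world\033[0m"
print(s)                   # prints in bold blue
print(remove_colorstr(s))  # "hello world"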
+ class TryExcept(contextlib.ContextDecorator):
998
+ """Ultralytics TryExcept class for handling exceptions gracefully.
999
+
1000
+ This class can be used as a decorator or context manager to catch exceptions and optionally print warning messages.
1001
+ It allows code to continue execution even when exceptions occur, which is useful for non-critical operations.
1002
+
1003
+ Attributes:
1004
+ msg (str): Optional message to display when an exception occurs.
1005
+ verbose (bool): Whether to print the exception message.
1006
+
1007
+ Examples:
1008
+ As a decorator:
1009
+ >>> @TryExcept(msg="Error occurred in func", verbose=True)
1010
+ >>> def func():
1011
+ >>> # Function logic here
1012
+ >>> pass
1013
+
1014
+ As a context manager:
1015
+ >>> with TryExcept(msg="Error occurred in block", verbose=True):
1016
+ >>> # Code block here
1017
+ >>> pass
1018
+ """
1019
+
1020
+ def __init__(self, msg="", verbose=True):
1021
+ """Initialize TryExcept class with optional message and verbosity settings."""
1022
+ self.msg = msg
1023
+ self.verbose = verbose
1024
+
1025
+ def __enter__(self):
1026
+ """Execute when entering TryExcept context, initialize instance."""
1027
+ pass
1028
+
1029
+ def __exit__(self, exc_type, value, traceback):
1030
+ """Define behavior when exiting a 'with' block, print error message if necessary."""
1031
+ if self.verbose and value:
1032
+ LOGGER.warning(f"{self.msg}{': ' if self.msg else ''}{value}")
1033
+ return True
+
+
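A brief sketch of both usage modes described above; the failing operations are placeholders:

from ultralytics.utils import TryExcept

@TryExcept(msg="metrics plot failed")
def plot_metrics():
    raise RuntimeError("no data")  # logged as a warning instead of raising

plot_metrics()  # execution continues

with TryExcept(msg="optional step failed", verbose=False):
    1 / 0  # suppressed silently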
+ class Retry(contextlib.ContextDecorator):
1037
+ """Retry class for function execution with exponential backoff.
1038
+
1039
+ This decorator can be used to retry a function on exceptions, up to a specified number of times with an
1040
+ exponentially increasing delay between retries. It's useful for handling transient failures in network operations or
1041
+ other unreliable processes.
1042
+
1043
+ Attributes:
1044
+ times (int): Maximum number of retry attempts.
1045
+ delay (int): Initial delay between retries in seconds.
1046
+
1047
+ Examples:
1048
+ Example usage as a decorator:
1049
+ >>> @Retry(times=3, delay=2)
1050
+ >>> def test_func():
1051
+ >>> # Replace with function logic that may raise exceptions
1052
+ >>> return True
1053
+ """
1054
+
1055
+ def __init__(self, times=3, delay=2):
1056
+ """Initialize Retry class with specified number of retries and delay."""
1057
+ self.times = times
1058
+ self.delay = delay
1059
+ self._attempts = 0
1060
+
1061
+ def __call__(self, func):
1062
+ """Decorator implementation for Retry with exponential backoff."""
1063
+
1064
+ def wrapped_func(*args, **kwargs):
1065
+ """Apply retries to the decorated function or method."""
1066
+ self._attempts = 0
1067
+ while self._attempts < self.times:
1068
+ try:
1069
+ return func(*args, **kwargs)
1070
+ except Exception as e:
1071
+ self._attempts += 1
1072
+ LOGGER.warning(f"Retry {self._attempts}/{self.times} failed: {e}")
1073
+ if self._attempts >= self.times:
1074
+ raise e
1075
+ time.sleep(self.delay * (2**self._attempts)) # exponential backoff delay
1076
+
1077
+ return wrapped_func
+
+
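A minimal sketch of the retry decorator with its exponential backoff; the flaky function is illustrative:

import random

from ultralytics.utils import Retry

@Retry(times=3, delay=1)
def flaky_download():
    if random.random() < 0.5:
        raise ConnectionError("transient failure")
    return "ok"

print(flaky_download())  # up to 3 attempts, sleeping 2 s then 4 s between failures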
+ def threaded(func):
+     """Multi-thread a target function by default and return the thread or function result.
+
+     This decorator provides flexible execution of the target function, either in a separate thread or synchronously. By
+     default, the function runs in a thread, but this can be controlled via the 'threaded=False' keyword argument which
+     is removed from kwargs before calling the function.
+
+     Args:
+         func (callable): The function to be potentially executed in a separate thread.
+
+     Returns:
+         (callable): A wrapper function that either returns a daemon thread or the direct function result.
+
+     Examples:
+         >>> @threaded
+         ... def process_data(data):
+         ...     return data
+         >>>
+         >>> thread = process_data(my_data) # Runs in background thread
+         >>> result = process_data(my_data, threaded=False) # Runs synchronously, returns function result
+     """
+
+     def wrapper(*args, **kwargs):
+         """Multi-thread a given function based on 'threaded' kwarg and return the thread or function result."""
+         if kwargs.pop("threaded", True): # run in thread
+             thread = threading.Thread(target=func, args=args, kwargs=kwargs, daemon=True)
+             thread.start()
+             return thread
+         else:
+             return func(*args, **kwargs)
+
+     return wrapper
+
+
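Note that when the call runs in a thread, the function's return value is discarded and the caller only receives the Thread object; passing threaded=False returns the result directly. A hedged usage sketch (resize_batch is a made-up example function):

from ultralytics.utils import threaded

@threaded
def resize_batch(n):
    return [i * 2 for i in range(n)]

t = resize_batch(4)  # returns a daemon threading.Thread; the list result is discarded
t.join()
result = resize_batch(4, threaded=False)  # runs synchronously and returns [0, 2, 4, 6]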
+ def set_sentry():
+     """Initialize the Sentry SDK for error tracking and reporting.
+
+     Only used if sentry_sdk package is installed and sync=True in settings. Run 'yolo settings' to see and update
+     settings.
+
+     Conditions required to send errors (ALL conditions must be met or no errors will be reported):
+         - sentry_sdk package is installed
+         - sync=True in YOLO settings
+         - pytest is not running
+         - running in a pip package installation
+         - running in a non-git directory
+         - running with rank -1 or 0
+         - online environment
+         - CLI used to run package (checked with 'yolo' as the name of the main CLI command)
+     """
+     if (
+         not SETTINGS["sync"]
+         or RANK not in {-1, 0}
+         or Path(ARGV[0]).name != "yolo"
+         or TESTS_RUNNING
+         or not ONLINE
+         or not IS_PIP_PACKAGE
+         or GIT.is_repo
+     ):
+         return
+     # If sentry_sdk package is not installed then return and do not use Sentry
+     try:
+         import sentry_sdk
+     except ImportError:
+         return
+
+     def before_send(event, hint):
+         """Modify the event before sending it to Sentry based on specific exception types and messages.
+
+         Args:
+             event (dict): The event dictionary containing information about the error.
+             hint (dict): A dictionary containing additional information about the error.
+
+         Returns:
+             (dict | None): The modified event or None if the event should not be sent to Sentry.
+         """
+         if "exc_info" in hint:
+             exc_type, exc_value, _ = hint["exc_info"]
+             if exc_type in {KeyboardInterrupt, FileNotFoundError} or "out of memory" in str(exc_value):
+                 return None # do not send event
+
+         event["tags"] = {
+             "sys_argv": ARGV[0],
+             "sys_argv_name": Path(ARGV[0]).name,
+             "install": "git" if GIT.is_repo else "pip" if IS_PIP_PACKAGE else "other",
+             "os": ENVIRONMENT,
+         }
+         return event
+
+     sentry_sdk.init(
+         dsn="https://888e5a0778212e1d0314c37d4b9aae5d@o4504521589325824.ingest.us.sentry.io/4504521592406016",
+         debug=False,
+         auto_enabling_integrations=False,
+         traces_sample_rate=1.0,
+         release=__version__,
+         environment="runpod" if is_runpod() else "production",
+         before_send=before_send,
+         ignore_errors=[KeyboardInterrupt, FileNotFoundError],
+     )
+     sentry_sdk.set_user({"id": SETTINGS["uuid"]}) # SHA-256 anonymized UUID hash
+
+
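The filtering rule inside before_send can be read in isolation: events whose exception is a KeyboardInterrupt or FileNotFoundError, or whose message mentions "out of memory", are dropped by returning None. A standalone sketch of just that rule (should_send is a hypothetical helper, not part of the module):

def should_send(exc_type, exc_value):
    """Mirror the drop conditions used in before_send above."""
    return not (exc_type in {KeyboardInterrupt, FileNotFoundError} or "out of memory" in str(exc_value))

assert not should_send(KeyboardInterrupt, KeyboardInterrupt())
assert not should_send(RuntimeError, RuntimeError("CUDA out of memory"))
assert should_send(ValueError, ValueError("bad value"))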
+ class JSONDict(dict):
+     """A dictionary-like class that provides JSON persistence for its contents.
+
+     This class extends the built-in dictionary to automatically save its contents to a JSON file whenever they are
+     modified. It ensures thread-safe operations using a lock and handles JSON serialization of Path objects.
+
+     Attributes:
+         file_path (Path): The path to the JSON file used for persistence.
+         lock (threading.Lock): A lock object to ensure thread-safe operations.
+
+     Methods:
+         _load: Load the data from the JSON file into the dictionary.
+         _save: Save the current state of the dictionary to the JSON file.
+         __setitem__: Store a key-value pair and persist it to disk.
+         __delitem__: Remove an item and update the persistent storage.
+         update: Update the dictionary and persist changes.
+         clear: Clear all entries and update the persistent storage.
+
+     Examples:
+         >>> json_dict = JSONDict("data.json")
+         >>> json_dict["key"] = "value"
+         >>> print(json_dict["key"])
+         value
+         >>> del json_dict["key"]
+         >>> json_dict.update({"new_key": "new_value"})
+         >>> json_dict.clear()
+     """
+
+     def __init__(self, file_path: str | Path = "data.json"):
+         """Initialize a JSONDict object with a specified file path for JSON persistence."""
+         super().__init__()
+         self.file_path = Path(file_path)
+         self.lock = Lock()
+         self._load()
+
+     def _load(self):
+         """Load the data from the JSON file into the dictionary."""
+         try:
+             if self.file_path.exists():
+                 with open(self.file_path) as f:
+                     # Use the base dict update to avoid persisting during reads
+                     super().update(json.load(f))
+         except json.JSONDecodeError:
+             LOGGER.warning(f"Error decoding JSON from {self.file_path}. Starting with an empty dictionary.")
+         except Exception as e:
+             LOGGER.error(f"Error reading from {self.file_path}: {e}")
+
+     def _save(self):
+         """Save the current state of the dictionary to the JSON file."""
+         try:
+             self.file_path.parent.mkdir(parents=True, exist_ok=True)
+             with open(self.file_path, "w", encoding="utf-8") as f:
+                 json.dump(dict(self), f, indent=2, default=self._json_default)
+         except Exception as e:
+             LOGGER.error(f"Error writing to {self.file_path}: {e}")
+
+     @staticmethod
+     def _json_default(obj):
+         """Handle JSON serialization of Path objects."""
+         if isinstance(obj, Path):
+             return str(obj)
+         raise TypeError(f"Object of type {type(obj).__name__} is not JSON serializable")
+
+     def __setitem__(self, key, value):
+         """Store a key-value pair and persist to disk."""
+         with self.lock:
+             super().__setitem__(key, value)
+             self._save()
+
+     def __delitem__(self, key):
+         """Remove an item and update the persistent storage."""
+         with self.lock:
+             super().__delitem__(key)
+             self._save()
+
+     def __str__(self):
+         """Return a pretty-printed JSON string representation of the dictionary."""
+         contents = json.dumps(dict(self), indent=2, ensure_ascii=False, default=self._json_default)
+         return f'JSONDict("{self.file_path}"):\n{contents}'
+
+     def update(self, *args, **kwargs):
+         """Update the dictionary and persist changes."""
+         with self.lock:
+             super().update(*args, **kwargs)
+             self._save()
+
+     def clear(self):
+         """Clear all entries and update the persistent storage."""
+         with self.lock:
+             super().clear()
+             self._save()
+
+
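Because every mutation rewrites the backing file, values (including Path objects, which _json_default serializes as strings) survive a reload from disk. A hedged round-trip sketch using a throwaway path:

from pathlib import Path
from ultralytics.utils import JSONDict

cache = JSONDict("/tmp/example_cache.json")  # hypothetical location
cache["weights"] = Path("/tmp/yolo11n.pt")   # stored on disk as the string "/tmp/yolo11n.pt"

reloaded = JSONDict("/tmp/example_cache.json")  # fresh instance re-reads the JSON file
print(reloaded["weights"])                      # -> /tmp/yolo11n.pt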
+ class SettingsManager(JSONDict):
+     """SettingsManager class for managing and persisting Ultralytics settings.
+
+     This class extends JSONDict to provide JSON persistence for settings, ensuring thread-safe operations and default
+     values. It validates settings on initialization and provides methods to update or reset settings. The settings
+     include directories for datasets, weights, and runs, as well as various integration flags.
+
+     Attributes:
+         file (Path): The path to the JSON file used for persistence.
+         version (str): The version of the settings schema.
+         defaults (dict): A dictionary containing default settings.
+         help_msg (str): A help message for users on how to view and update settings.
+
+     Methods:
+         _validate_settings: Validate the current settings and reset if necessary.
+         update: Update settings, validating keys and types.
+         reset: Reset the settings to default and save them.
+
+     Examples:
+         Initialize and update settings:
+         >>> settings = SettingsManager()
+         >>> settings.update(runs_dir="/new/runs/dir")
+         >>> print(settings["runs_dir"])
+         /new/runs/dir
+     """
+
+     def __init__(self, file=SETTINGS_FILE, version="0.0.6"):
+         """Initialize the SettingsManager with default settings and load user settings."""
+         import hashlib
+         import uuid
+
+         from ultralytics.utils.torch_utils import torch_distributed_zero_first
+
+         root = GIT.root or Path()
+         datasets_root = (root.parent if GIT.root and is_dir_writeable(root.parent) else root).resolve()
+
+         self.file = Path(file)
+         self.version = version
+         self.defaults = {
+             "settings_version": version, # Settings schema version
+             "datasets_dir": str(datasets_root / "datasets"), # Datasets directory
+             "weights_dir": str(root / "weights"), # Model weights directory
+             "runs_dir": str(root / "runs"), # Experiment runs directory
+             "uuid": hashlib.sha256(str(uuid.getnode()).encode()).hexdigest(), # SHA-256 anonymized UUID hash
+             "sync": True, # Enable synchronization
+             "api_key": "", # Ultralytics API Key
+             "openai_api_key": "", # OpenAI API Key
+             "clearml": True, # ClearML integration
+             "comet": True, # Comet integration
+             "dvc": True, # DVC integration
+             "hub": True, # Ultralytics HUB integration
+             "mlflow": True, # MLflow integration
+             "neptune": True, # Neptune integration
+             "raytune": True, # Ray Tune integration
+             "tensorboard": False, # TensorBoard logging
+             "wandb": False, # Weights & Biases logging
+             "vscode_msg": True, # VSCode message
+             "openvino_msg": True, # OpenVINO export on Intel CPU message
+         }
+
+         self.help_msg = (
+             f"\nView Ultralytics Settings with 'yolo settings' or at '{self.file}'"
+             "\nUpdate Settings with 'yolo settings key=value', i.e. 'yolo settings runs_dir=path/to/dir'. "
+             "For help see https://docs.ultralytics.com/quickstart/#ultralytics-settings."
+         )
+
+         with torch_distributed_zero_first(LOCAL_RANK):
+             super().__init__(self.file)
+
+             if not self.file.exists() or not self: # Check if file doesn't exist or is empty
+                 LOGGER.info(f"Creating new Ultralytics Settings v{version} file ✅ {self.help_msg}")
+                 self.reset()
+
+             self._validate_settings()
+
+     def _validate_settings(self):
+         """Validate the current settings and reset if necessary."""
+         correct_keys = frozenset(self.keys()) == frozenset(self.defaults.keys())
+         correct_types = all(isinstance(self.get(k), type(v)) for k, v in self.defaults.items())
+         correct_version = self.get("settings_version", "") == self.version
+
+         if not (correct_keys and correct_types and correct_version):
+             LOGGER.warning(
+                 "Ultralytics settings reset to default values. This may be due to a possible problem "
+                 f"with your settings or a recent ultralytics package update. {self.help_msg}"
+             )
+             self.reset()
+
+         if self.get("datasets_dir") == self.get("runs_dir"):
+             LOGGER.warning(
+                 f"Ultralytics setting 'datasets_dir: {self.get('datasets_dir')}' "
+                 f"must be different than 'runs_dir: {self.get('runs_dir')}'. "
+                 f"Please change one to avoid possible issues during training. {self.help_msg}"
+             )
+
+     def __setitem__(self, key, value):
+         """Update one key: value pair."""
+         self.update({key: value})
+
+     def update(self, *args, **kwargs):
+         """Update settings, validating keys and types."""
+         for arg in args:
+             if isinstance(arg, dict):
+                 kwargs.update(arg)
+         for k, v in kwargs.items():
+             if k not in self.defaults:
+                 raise KeyError(f"No Ultralytics setting '{k}'. {self.help_msg}")
+             t = type(self.defaults[k])
+             if not isinstance(v, t):
+                 raise TypeError(
+                     f"Ultralytics setting '{k}' must be '{t.__name__}' type, not '{type(v).__name__}'. {self.help_msg}"
+                 )
+         super().update(*args, **kwargs)
+
+     def reset(self):
+         """Reset the settings to default and save them."""
+         self.clear()
+         self.update(self.defaults)
+
+
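The overridden update is what makes misconfiguration fail fast: unknown keys raise KeyError and wrong value types raise TypeError before anything is persisted. A hedged sketch against the module-level SETTINGS instance created further down in this file:

from ultralytics.utils import SETTINGS

SETTINGS.update(tensorboard=False)    # valid key and type; persisted to the settings file
try:
    SETTINGS.update(tensorbord=True)  # misspelled key
except KeyError as e:
    print(e)
try:
    SETTINGS.update(sync="yes")       # bool expected, str given
except TypeError as e:
    print(e)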
+ def deprecation_warn(arg, new_arg=None):
+     """Issue a deprecation warning when a deprecated argument is used, suggesting an updated argument."""
+     msg = f"'{arg}' is deprecated and will be removed in the future."
+     if new_arg is not None:
+         msg += f" Use '{new_arg}' instead."
+     LOGGER.warning(msg)
+
+
+ def clean_url(url):
+     """Strip auth from URL, i.e. https://url.com/file.txt?auth -> https://url.com/file.txt."""
+     url = Path(url).as_posix().replace(":/", "://") # Pathlib turns :// -> :/, as_posix() for Windows
+     return unquote(url).split("?", 1)[0] # '%2F' to '/', split https://url.com/file.txt?auth
+
+
+ def url2file(url):
+     """Convert URL to filename, i.e. https://url.com/file.txt?auth -> file.txt."""
+     return Path(clean_url(url)).name
+
+
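Concretely, the two URL helpers strip the query string (and any auth token in it), decode percent-encoding, and, for url2file, keep only the final path component. Hedged examples with a made-up URL:

from ultralytics.utils import clean_url, url2file

print(clean_url("https://example.com/models/yolo11n.pt?token=abc123"))
# -> https://example.com/models/yolo11n.pt
print(url2file("https://example.com/models/yolo%20v11.pt?token=abc123"))
# -> yolo v11.pt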
+ def vscode_msg(ext="ultralytics.ultralytics-snippets") -> str:
+     """Display a message to install Ultralytics-Snippets for VS Code if not already installed."""
+     path = (USER_CONFIG_DIR.parents[2] if WINDOWS else USER_CONFIG_DIR.parents[1]) / ".vscode/extensions"
+     obs_file = path / ".obsolete" # file tracks uninstalled extensions, while source directory remains
+     installed = any(path.glob(f"{ext}*")) and ext not in (obs_file.read_text("utf-8") if obs_file.exists() else "")
+     url = "https://docs.ultralytics.com/integrations/vscode"
+     return "" if installed else f"{colorstr('VS Code:')} view Ultralytics VS Code Extension ⚡ at {url}"
+
+
+ # Run below code on utils init ------------------------------------------------------------------------------------
+
+ # Check first-install steps
+ PREFIX = colorstr("Ultralytics: ")
+ SETTINGS = SettingsManager() # initialize settings
+ PERSISTENT_CACHE = JSONDict(USER_CONFIG_DIR / "persistent_cache.json") # initialize persistent cache
+ DATASETS_DIR = Path(SETTINGS["datasets_dir"]) # global datasets directory
+ WEIGHTS_DIR = Path(SETTINGS["weights_dir"]) # global weights directory
+ RUNS_DIR = Path(SETTINGS["runs_dir"]) # global runs directory
+ ENVIRONMENT = (
+     "Colab"
+     if IS_COLAB
+     else "Kaggle"
+     if IS_KAGGLE
+     else "Jupyter"
+     if IS_JUPYTER
+     else "Docker"
+     if IS_DOCKER
+     else platform.system()
+ )
+ TESTS_RUNNING = is_pytest_running() or is_github_action_running()
+ set_sentry()
+
+ # Apply monkey patches
+ torch.save = torch_save
+ if WINDOWS:
+     # Apply cv2 patches for non-ASCII and non-UTF characters in image paths
+     cv2.imread, cv2.imwrite, cv2.imshow = imread, imwrite, imshow
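The cv2 replacements assigned here are imported from elsewhere in the package (ultralytics.utils.patches, not shown in this hunk), so the sketch below only illustrates the general technique such a patch typically relies on, reading bytes with NumPy and decoding them so Windows paths with non-ASCII characters work; it is not necessarily the package's exact implementation:

import cv2
import numpy as np

def imread_unicode(path: str, flags: int = cv2.IMREAD_COLOR):
    """Read an image from a path that may contain non-ASCII characters."""
    return cv2.imdecode(np.fromfile(path, dtype=np.uint8), flags)

img = imread_unicode("C:/数据/bus.jpg")  # hypothetical non-ASCII path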