dgenerate-ultralytics-headless 8.3.137__py3-none-any.whl → 8.3.224__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (215)
  1. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.224.dist-info}/METADATA +41 -34
  2. dgenerate_ultralytics_headless-8.3.224.dist-info/RECORD +285 -0
  3. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.224.dist-info}/WHEEL +1 -1
  4. tests/__init__.py +7 -6
  5. tests/conftest.py +15 -39
  6. tests/test_cli.py +17 -17
  7. tests/test_cuda.py +17 -8
  8. tests/test_engine.py +36 -10
  9. tests/test_exports.py +98 -37
  10. tests/test_integrations.py +12 -15
  11. tests/test_python.py +126 -82
  12. tests/test_solutions.py +319 -135
  13. ultralytics/__init__.py +27 -9
  14. ultralytics/cfg/__init__.py +83 -87
  15. ultralytics/cfg/datasets/Argoverse.yaml +4 -4
  16. ultralytics/cfg/datasets/DOTAv1.5.yaml +2 -2
  17. ultralytics/cfg/datasets/DOTAv1.yaml +2 -2
  18. ultralytics/cfg/datasets/GlobalWheat2020.yaml +2 -2
  19. ultralytics/cfg/datasets/HomeObjects-3K.yaml +4 -5
  20. ultralytics/cfg/datasets/ImageNet.yaml +3 -3
  21. ultralytics/cfg/datasets/Objects365.yaml +24 -20
  22. ultralytics/cfg/datasets/SKU-110K.yaml +9 -9
  23. ultralytics/cfg/datasets/VOC.yaml +10 -13
  24. ultralytics/cfg/datasets/VisDrone.yaml +43 -33
  25. ultralytics/cfg/datasets/african-wildlife.yaml +5 -5
  26. ultralytics/cfg/datasets/brain-tumor.yaml +4 -5
  27. ultralytics/cfg/datasets/carparts-seg.yaml +5 -5
  28. ultralytics/cfg/datasets/coco-pose.yaml +26 -4
  29. ultralytics/cfg/datasets/coco.yaml +4 -4
  30. ultralytics/cfg/datasets/coco128-seg.yaml +2 -2
  31. ultralytics/cfg/datasets/coco128.yaml +2 -2
  32. ultralytics/cfg/datasets/coco8-grayscale.yaml +103 -0
  33. ultralytics/cfg/datasets/coco8-multispectral.yaml +2 -2
  34. ultralytics/cfg/datasets/coco8-pose.yaml +23 -2
  35. ultralytics/cfg/datasets/coco8-seg.yaml +2 -2
  36. ultralytics/cfg/datasets/coco8.yaml +2 -2
  37. ultralytics/cfg/datasets/construction-ppe.yaml +32 -0
  38. ultralytics/cfg/datasets/crack-seg.yaml +5 -5
  39. ultralytics/cfg/datasets/dog-pose.yaml +32 -4
  40. ultralytics/cfg/datasets/dota8-multispectral.yaml +2 -2
  41. ultralytics/cfg/datasets/dota8.yaml +2 -2
  42. ultralytics/cfg/datasets/hand-keypoints.yaml +29 -4
  43. ultralytics/cfg/datasets/lvis.yaml +9 -9
  44. ultralytics/cfg/datasets/medical-pills.yaml +4 -5
  45. ultralytics/cfg/datasets/open-images-v7.yaml +7 -10
  46. ultralytics/cfg/datasets/package-seg.yaml +5 -5
  47. ultralytics/cfg/datasets/signature.yaml +4 -4
  48. ultralytics/cfg/datasets/tiger-pose.yaml +20 -4
  49. ultralytics/cfg/datasets/xView.yaml +5 -5
  50. ultralytics/cfg/default.yaml +96 -93
  51. ultralytics/cfg/trackers/botsort.yaml +16 -17
  52. ultralytics/cfg/trackers/bytetrack.yaml +9 -11
  53. ultralytics/data/__init__.py +4 -4
  54. ultralytics/data/annotator.py +12 -12
  55. ultralytics/data/augment.py +531 -564
  56. ultralytics/data/base.py +76 -81
  57. ultralytics/data/build.py +206 -42
  58. ultralytics/data/converter.py +179 -78
  59. ultralytics/data/dataset.py +121 -121
  60. ultralytics/data/loaders.py +114 -91
  61. ultralytics/data/split.py +28 -15
  62. ultralytics/data/split_dota.py +67 -48
  63. ultralytics/data/utils.py +110 -89
  64. ultralytics/engine/exporter.py +422 -460
  65. ultralytics/engine/model.py +224 -252
  66. ultralytics/engine/predictor.py +94 -89
  67. ultralytics/engine/results.py +345 -595
  68. ultralytics/engine/trainer.py +231 -134
  69. ultralytics/engine/tuner.py +279 -73
  70. ultralytics/engine/validator.py +53 -46
  71. ultralytics/hub/__init__.py +26 -28
  72. ultralytics/hub/auth.py +30 -16
  73. ultralytics/hub/google/__init__.py +34 -36
  74. ultralytics/hub/session.py +53 -77
  75. ultralytics/hub/utils.py +23 -109
  76. ultralytics/models/__init__.py +1 -1
  77. ultralytics/models/fastsam/__init__.py +1 -1
  78. ultralytics/models/fastsam/model.py +36 -18
  79. ultralytics/models/fastsam/predict.py +33 -44
  80. ultralytics/models/fastsam/utils.py +4 -5
  81. ultralytics/models/fastsam/val.py +12 -14
  82. ultralytics/models/nas/__init__.py +1 -1
  83. ultralytics/models/nas/model.py +16 -20
  84. ultralytics/models/nas/predict.py +12 -14
  85. ultralytics/models/nas/val.py +4 -5
  86. ultralytics/models/rtdetr/__init__.py +1 -1
  87. ultralytics/models/rtdetr/model.py +9 -9
  88. ultralytics/models/rtdetr/predict.py +22 -17
  89. ultralytics/models/rtdetr/train.py +20 -16
  90. ultralytics/models/rtdetr/val.py +79 -59
  91. ultralytics/models/sam/__init__.py +8 -2
  92. ultralytics/models/sam/amg.py +53 -38
  93. ultralytics/models/sam/build.py +29 -31
  94. ultralytics/models/sam/model.py +33 -38
  95. ultralytics/models/sam/modules/blocks.py +159 -182
  96. ultralytics/models/sam/modules/decoders.py +38 -47
  97. ultralytics/models/sam/modules/encoders.py +114 -133
  98. ultralytics/models/sam/modules/memory_attention.py +38 -31
  99. ultralytics/models/sam/modules/sam.py +114 -93
  100. ultralytics/models/sam/modules/tiny_encoder.py +268 -291
  101. ultralytics/models/sam/modules/transformer.py +59 -66
  102. ultralytics/models/sam/modules/utils.py +55 -72
  103. ultralytics/models/sam/predict.py +745 -341
  104. ultralytics/models/utils/loss.py +118 -107
  105. ultralytics/models/utils/ops.py +118 -71
  106. ultralytics/models/yolo/__init__.py +1 -1
  107. ultralytics/models/yolo/classify/predict.py +28 -26
  108. ultralytics/models/yolo/classify/train.py +50 -81
  109. ultralytics/models/yolo/classify/val.py +68 -61
  110. ultralytics/models/yolo/detect/predict.py +12 -15
  111. ultralytics/models/yolo/detect/train.py +56 -46
  112. ultralytics/models/yolo/detect/val.py +279 -223
  113. ultralytics/models/yolo/model.py +167 -86
  114. ultralytics/models/yolo/obb/predict.py +7 -11
  115. ultralytics/models/yolo/obb/train.py +23 -25
  116. ultralytics/models/yolo/obb/val.py +107 -99
  117. ultralytics/models/yolo/pose/__init__.py +1 -1
  118. ultralytics/models/yolo/pose/predict.py +12 -14
  119. ultralytics/models/yolo/pose/train.py +31 -69
  120. ultralytics/models/yolo/pose/val.py +119 -254
  121. ultralytics/models/yolo/segment/predict.py +21 -25
  122. ultralytics/models/yolo/segment/train.py +12 -66
  123. ultralytics/models/yolo/segment/val.py +126 -305
  124. ultralytics/models/yolo/world/train.py +53 -45
  125. ultralytics/models/yolo/world/train_world.py +51 -32
  126. ultralytics/models/yolo/yoloe/__init__.py +7 -7
  127. ultralytics/models/yolo/yoloe/predict.py +30 -37
  128. ultralytics/models/yolo/yoloe/train.py +89 -71
  129. ultralytics/models/yolo/yoloe/train_seg.py +15 -17
  130. ultralytics/models/yolo/yoloe/val.py +56 -41
  131. ultralytics/nn/__init__.py +9 -11
  132. ultralytics/nn/autobackend.py +179 -107
  133. ultralytics/nn/modules/__init__.py +67 -67
  134. ultralytics/nn/modules/activation.py +8 -7
  135. ultralytics/nn/modules/block.py +302 -323
  136. ultralytics/nn/modules/conv.py +61 -104
  137. ultralytics/nn/modules/head.py +488 -186
  138. ultralytics/nn/modules/transformer.py +183 -123
  139. ultralytics/nn/modules/utils.py +15 -20
  140. ultralytics/nn/tasks.py +327 -203
  141. ultralytics/nn/text_model.py +81 -65
  142. ultralytics/py.typed +1 -0
  143. ultralytics/solutions/__init__.py +12 -12
  144. ultralytics/solutions/ai_gym.py +19 -27
  145. ultralytics/solutions/analytics.py +36 -26
  146. ultralytics/solutions/config.py +29 -28
  147. ultralytics/solutions/distance_calculation.py +23 -24
  148. ultralytics/solutions/heatmap.py +17 -19
  149. ultralytics/solutions/instance_segmentation.py +21 -19
  150. ultralytics/solutions/object_blurrer.py +16 -17
  151. ultralytics/solutions/object_counter.py +48 -53
  152. ultralytics/solutions/object_cropper.py +22 -16
  153. ultralytics/solutions/parking_management.py +61 -58
  154. ultralytics/solutions/queue_management.py +19 -19
  155. ultralytics/solutions/region_counter.py +63 -50
  156. ultralytics/solutions/security_alarm.py +22 -25
  157. ultralytics/solutions/similarity_search.py +107 -60
  158. ultralytics/solutions/solutions.py +343 -262
  159. ultralytics/solutions/speed_estimation.py +35 -31
  160. ultralytics/solutions/streamlit_inference.py +104 -40
  161. ultralytics/solutions/templates/similarity-search.html +31 -24
  162. ultralytics/solutions/trackzone.py +24 -24
  163. ultralytics/solutions/vision_eye.py +11 -12
  164. ultralytics/trackers/__init__.py +1 -1
  165. ultralytics/trackers/basetrack.py +18 -27
  166. ultralytics/trackers/bot_sort.py +48 -39
  167. ultralytics/trackers/byte_tracker.py +94 -94
  168. ultralytics/trackers/track.py +7 -16
  169. ultralytics/trackers/utils/gmc.py +37 -69
  170. ultralytics/trackers/utils/kalman_filter.py +68 -76
  171. ultralytics/trackers/utils/matching.py +13 -17
  172. ultralytics/utils/__init__.py +251 -275
  173. ultralytics/utils/autobatch.py +19 -7
  174. ultralytics/utils/autodevice.py +68 -38
  175. ultralytics/utils/benchmarks.py +169 -130
  176. ultralytics/utils/callbacks/base.py +12 -13
  177. ultralytics/utils/callbacks/clearml.py +14 -15
  178. ultralytics/utils/callbacks/comet.py +139 -66
  179. ultralytics/utils/callbacks/dvc.py +19 -27
  180. ultralytics/utils/callbacks/hub.py +8 -6
  181. ultralytics/utils/callbacks/mlflow.py +6 -10
  182. ultralytics/utils/callbacks/neptune.py +11 -19
  183. ultralytics/utils/callbacks/platform.py +73 -0
  184. ultralytics/utils/callbacks/raytune.py +3 -4
  185. ultralytics/utils/callbacks/tensorboard.py +9 -12
  186. ultralytics/utils/callbacks/wb.py +33 -30
  187. ultralytics/utils/checks.py +163 -114
  188. ultralytics/utils/cpu.py +89 -0
  189. ultralytics/utils/dist.py +24 -20
  190. ultralytics/utils/downloads.py +176 -146
  191. ultralytics/utils/errors.py +11 -13
  192. ultralytics/utils/events.py +113 -0
  193. ultralytics/utils/export/__init__.py +7 -0
  194. ultralytics/utils/{export.py → export/engine.py} +81 -63
  195. ultralytics/utils/export/imx.py +294 -0
  196. ultralytics/utils/export/tensorflow.py +217 -0
  197. ultralytics/utils/files.py +33 -36
  198. ultralytics/utils/git.py +137 -0
  199. ultralytics/utils/instance.py +105 -120
  200. ultralytics/utils/logger.py +404 -0
  201. ultralytics/utils/loss.py +99 -61
  202. ultralytics/utils/metrics.py +649 -478
  203. ultralytics/utils/nms.py +337 -0
  204. ultralytics/utils/ops.py +263 -451
  205. ultralytics/utils/patches.py +70 -31
  206. ultralytics/utils/plotting.py +253 -223
  207. ultralytics/utils/tal.py +48 -61
  208. ultralytics/utils/torch_utils.py +244 -251
  209. ultralytics/utils/tqdm.py +438 -0
  210. ultralytics/utils/triton.py +22 -23
  211. ultralytics/utils/tuner.py +11 -10
  212. dgenerate_ultralytics_headless-8.3.137.dist-info/RECORD +0 -272
  213. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.224.dist-info}/entry_points.txt +0 -0
  214. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.224.dist-info}/licenses/LICENSE +0 -0
  215. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.224.dist-info}/top_level.txt +0 -0
ultralytics/utils/downloads.py
@@ -1,5 +1,7 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

+ from __future__ import annotations
+
  import re
  import shutil
  import subprocess
@@ -8,9 +10,7 @@ from multiprocessing.pool import ThreadPool
  from pathlib import Path
  from urllib import parse, request

- import torch
-
- from ultralytics.utils import LOGGER, TQDM, checks, clean_url, emojis, is_online, url2file
+ from ultralytics.utils import ASSETS_URL, LOGGER, TQDM, checks, clean_url, emojis, is_online, url2file

  # Define Ultralytics GitHub assets maintained at https://github.com/ultralytics/assets
  GITHUB_ASSETS_REPO = "ultralytics/assets"
@@ -32,47 +32,48 @@ GITHUB_ASSETS_NAMES = frozenset(
  + [f"sam2.1_{k}.pt" for k in "blst"]
  + [f"FastSAM-{k}.pt" for k in "sx"]
  + [f"rtdetr-{k}.pt" for k in "lx"]
- + ["mobile_sam.pt"]
- + ["mobileclip_blt.ts"]
- + ["calibration_image_sample_data_20x128x128x3_float32.npy.zip"]
+ + [
+ "mobile_sam.pt",
+ "mobileclip_blt.ts",
+ "yolo11n-grayscale.pt",
+ "calibration_image_sample_data_20x128x128x3_float32.npy.zip",
+ ]
  )
- GITHUB_ASSETS_STEMS = frozenset(k.rsplit(".", 1)[0] for k in GITHUB_ASSETS_NAMES)
+ GITHUB_ASSETS_STEMS = frozenset(k.rpartition(".")[0] for k in GITHUB_ASSETS_NAMES)


- def is_url(url, check=False):
- """
- Validates if the given string is a URL and optionally checks if the URL exists online.
+ def is_url(url: str | Path, check: bool = False) -> bool:
+ """Validate if the given string is a URL and optionally check if the URL exists online.

  Args:
  url (str): The string to be validated as a URL.
  check (bool, optional): If True, performs an additional check to see if the URL exists online.
- Defaults to False.

  Returns:
- (bool): Returns True for a valid URL. If 'check' is True, also returns True if the URL exists online.
- Returns False otherwise.
+ (bool): True for a valid URL. If 'check' is True, also returns True if the URL exists online.

  Examples:
  >>> valid = is_url("https://www.example.com")
+ >>> valid_and_exists = is_url("https://www.example.com", check=True)
  """
  try:
  url = str(url)
  result = parse.urlparse(url)
- assert all([result.scheme, result.netloc]) # check if is url
+ if not (result.scheme and result.netloc):
+ return False
  if check:
- with request.urlopen(url) as response:
- return response.getcode() == 200 # check if exists online
+ r = request.urlopen(request.Request(url, method="HEAD"), timeout=3)
+ return 200 <= r.getcode() < 400
  return True
  except Exception:
  return False


- def delete_dsstore(path, files_to_delete=(".DS_Store", "__MACOSX")):
- """
- Delete all ".DS_store" files in a specified directory.
+ def delete_dsstore(path: str | Path, files_to_delete: tuple[str, ...] = (".DS_Store", "__MACOSX")) -> None:
+ """Delete all specified system files in a directory.

  Args:
- path (str, optional): The directory path where the ".DS_store" files should be deleted.
+ path (str | Path): The directory path where the files should be deleted.
  files_to_delete (tuple): The files to be deleted.

  Examples:
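The is_url rewrite above drops the assert-based scheme/netloc check in favour of explicit returns, and swaps the full GET request for a HEAD request with a 3-second timeout, so a check=True call no longer streams the response body. A minimal standalone sketch of the same pattern (the head_check name is mine, not part of the package):

    from urllib import parse, request

    def head_check(url: str, timeout: float = 3.0) -> bool:
        """Return True if url parses cleanly and a HEAD request answers with a 2xx/3xx status."""
        result = parse.urlparse(str(url))
        if not (result.scheme and result.netloc):  # no scheme or host -> not a URL
            return False
        try:
            r = request.urlopen(request.Request(url, method="HEAD"), timeout=timeout)
            return 200 <= r.getcode() < 400  # treat redirects as "exists"
        except Exception:
            return False

    print(head_check("https://www.example.com"))   # True when online
    print(head_check("not a url", timeout=1.0))    # False, fails the parse step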
@@ -90,16 +91,21 @@ def delete_dsstore(path, files_to_delete=(".DS_Store", "__MACOSX")):
  f.unlink()


- def zip_directory(directory, compress=True, exclude=(".DS_Store", "__MACOSX"), progress=True):
- """
- Zips the contents of a directory, excluding files containing strings in the exclude list. The resulting zip file is
- named after the directory and placed alongside it.
+ def zip_directory(
+ directory: str | Path,
+ compress: bool = True,
+ exclude: tuple[str, ...] = (".DS_Store", "__MACOSX"),
+ progress: bool = True,
+ ) -> Path:
+ """Zip the contents of a directory, excluding specified files.
+
+ The resulting zip file is named after the directory and placed alongside it.

  Args:
  directory (str | Path): The path to the directory to be zipped.
- compress (bool): Whether to compress the files while zipping. Default is True.
- exclude (tuple, optional): A tuple of filename strings to be excluded. Defaults to ('.DS_Store', '__MACOSX').
- progress (bool, optional): Whether to display a progress bar. Defaults to True.
+ compress (bool): Whether to compress the files while zipping.
+ exclude (tuple, optional): A tuple of filename strings to be excluded.
+ progress (bool, optional): Whether to display a progress bar.

  Returns:
  (Path): The path to the resulting zip file.
@@ -115,38 +121,43 @@ def zip_directory(directory, compress=True, exclude=(".DS_Store", "__MACOSX"), p
  if not directory.is_dir():
  raise FileNotFoundError(f"Directory '{directory}' does not exist.")

- # Unzip with progress bar
- files_to_zip = [f for f in directory.rglob("*") if f.is_file() and all(x not in f.name for x in exclude)]
+ # Zip with progress bar
+ files = [f for f in directory.rglob("*") if f.is_file() and all(x not in f.name for x in exclude)] # files to zip
  zip_file = directory.with_suffix(".zip")
  compression = ZIP_DEFLATED if compress else ZIP_STORED
  with ZipFile(zip_file, "w", compression) as f:
- for file in TQDM(files_to_zip, desc=f"Zipping {directory} to {zip_file}...", unit="file", disable=not progress):
+ for file in TQDM(files, desc=f"Zipping {directory} to {zip_file}...", unit="files", disable=not progress):
  f.write(file, file.relative_to(directory))

  return zip_file # return path to zip file


- def unzip_file(file, path=None, exclude=(".DS_Store", "__MACOSX"), exist_ok=False, progress=True):
- """
- Unzips a *.zip file to the specified path, excluding files containing strings in the exclude list.
+ def unzip_file(
+ file: str | Path,
+ path: str | Path | None = None,
+ exclude: tuple[str, ...] = (".DS_Store", "__MACOSX"),
+ exist_ok: bool = False,
+ progress: bool = True,
+ ) -> Path:
+ """Unzip a *.zip file to the specified path, excluding specified files.

- If the zipfile does not contain a single top-level directory, the function will create a new
- directory with the same name as the zipfile (without the extension) to extract its contents.
- If a path is not provided, the function will use the parent directory of the zipfile as the default path.
+ If the zipfile does not contain a single top-level directory, the function will create a new directory with the same
+ name as the zipfile (without the extension) to extract its contents. If a path is not provided, the function will
+ use the parent directory of the zipfile as the default path.

  Args:
  file (str | Path): The path to the zipfile to be extracted.
- path (str | Path, optional): The path to extract the zipfile to. Defaults to None.
- exclude (tuple, optional): A tuple of filename strings to be excluded. Defaults to ('.DS_Store', '__MACOSX').
- exist_ok (bool, optional): Whether to overwrite existing contents if they exist. Defaults to False.
- progress (bool, optional): Whether to display a progress bar. Defaults to True.
-
- Raises:
- BadZipFile: If the provided file does not exist or is not a valid zipfile.
+ path (str | Path, optional): The path to extract the zipfile to.
+ exclude (tuple, optional): A tuple of filename strings to be excluded.
+ exist_ok (bool, optional): Whether to overwrite existing contents if they exist.
+ progress (bool, optional): Whether to display a progress bar.

  Returns:
  (Path): The path to the directory where the zipfile was extracted.

+ Raises:
+ BadZipFile: If the provided file does not exist or is not a valid zipfile.
+
  Examples:
  >>> from ultralytics.utils.downloads import unzip_file
  >>> directory = unzip_file("path/to/file.zip")
@@ -168,7 +179,7 @@ def unzip_file(file, path=None, exclude=(".DS_Store", "__MACOSX"), exist_ok=Fals
  if unzip_as_dir:
  # Zip has 1 top-level directory
  extract_path = path # i.e. ../datasets
- path = Path(path) / list(top_level_dirs)[0] # i.e. extract coco8/ dir to ../datasets/
+ path = Path(path) / next(iter(top_level_dirs)) # i.e. extract coco8/ dir to ../datasets/
  else:
  # Zip has multiple files at top level
  path = extract_path = Path(path) / Path(file).stem # i.e. extract multiple files to ../datasets/coco8/
@@ -179,7 +190,7 @@ def unzip_file(file, path=None, exclude=(".DS_Store", "__MACOSX"), exist_ok=Fals
  LOGGER.warning(f"Skipping {file} unzip as destination directory {path} is not empty.")
  return path

- for f in TQDM(files, desc=f"Unzipping {file} to {Path(path).resolve()}...", unit="file", disable=not progress):
+ for f in TQDM(files, desc=f"Unzipping {file} to {Path(path).resolve()}...", unit="files", disable=not progress):
  # Ensure the file is within the extract_path to avoid path traversal security vulnerability
  if ".." in Path(f).parts:
  LOGGER.warning(f"Potentially insecure file path: {f}, skipping extraction.")
@@ -189,39 +200,31 @@ def unzip_file(file, path=None, exclude=(".DS_Store", "__MACOSX"), exist_ok=Fals
  return path # return unzip dir


- def check_disk_space(url="https://ultralytics.com/assets/coco8.zip", path=Path.cwd(), sf=1.5, hard=True):
- """
- Check if there is sufficient disk space to download and store a file.
+ def check_disk_space(
+ file_bytes: int,
+ path: str | Path = Path.cwd(),
+ sf: float = 1.5,
+ hard: bool = True,
+ ) -> bool:
+ """Check if there is sufficient disk space to download and store a file.

  Args:
- url (str, optional): The URL to the file. Defaults to 'https://ultralytics.com/assets/coco8.zip'.
+ file_bytes (int): The file size in bytes.
  path (str | Path, optional): The path or drive to check the available free space on.
- sf (float, optional): Safety factor, the multiplier for the required free space. Defaults to 1.5.
- hard (bool, optional): Whether to throw an error or not on insufficient disk space. Defaults to True.
+ sf (float, optional): Safety factor, the multiplier for the required free space.
+ hard (bool, optional): Whether to throw an error or not on insufficient disk space.

  Returns:
  (bool): True if there is sufficient disk space, False otherwise.
  """
- import requests # slow import
-
- try:
- r = requests.head(url) # response
- assert r.status_code < 400, f"URL error for {url}: {r.status_code} {r.reason}" # check response
- except Exception:
- return True # requests issue, default to True
-
- # Check file size
- gib = 1 << 30 # bytes per GiB
- data = int(r.headers.get("Content-Length", 0)) / gib # file size (GB)
- total, used, free = (x / gib for x in shutil.disk_usage(path)) # bytes
-
- if data * sf < free:
+ _total, _used, free = shutil.disk_usage(path) # bytes
+ if file_bytes * sf < free:
  return True # sufficient space

  # Insufficient space
  text = (
- f"Insufficient free disk space {free:.1f} GB < {data * sf:.3f} GB required, "
- f"Please free {data * sf - free:.1f} GB additional disk space and try again."
+ f"Insufficient free disk space {free >> 30:.3f} GB < {int(file_bytes * sf) >> 30:.3f} GB required, "
+ f"Please free {int(file_bytes * sf - free) >> 30:.3f} GB additional disk space and try again."
  )
  if hard:
  raise MemoryError(text)
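check_disk_space no longer issues its own requests.head call: callers now pass the known size in bytes and the function only compares it, times a safety factor, against shutil.disk_usage. A minimal sketch of the same free-space arithmetic, assuming the download size is already known (the has_disk_space name is mine):

    import shutil
    from pathlib import Path

    def has_disk_space(file_bytes: int, path: Path = Path.cwd(), sf: float = 1.5) -> bool:
        """Return True if the filesystem behind `path` has at least sf * file_bytes free."""
        free = shutil.disk_usage(path).free   # free bytes on the target drive
        required = int(file_bytes * sf)       # safety factor leaves headroom, e.g. for unzipping
        if required < free:
            return True
        print(f"Need ~{required >> 30} GiB free, only {free >> 30} GiB available at {path}")
        return False

    has_disk_space(2 * (1 << 30))  # room for a ~2 GiB download plus 50% headroom?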
@@ -229,25 +232,24 @@ def check_disk_space(url="https://ultralytics.com/assets/coco8.zip", path=Path.c
  return False


- def get_google_drive_file_info(link):
- """
- Retrieves the direct download link and filename for a shareable Google Drive file link.
+ def get_google_drive_file_info(link: str) -> tuple[str, str | None]:
+ """Retrieve the direct download link and filename for a shareable Google Drive file link.

  Args:
  link (str): The shareable link of the Google Drive file.

  Returns:
- (str): Direct download URL for the Google Drive file.
- (str): Original filename of the Google Drive file. If filename extraction fails, returns None.
+ url (str): Direct download URL for the Google Drive file.
+ filename (str | None): Original filename of the Google Drive file. If filename extraction fails, returns None.

  Examples:
  >>> from ultralytics.utils.downloads import get_google_drive_file_info
  >>> link = "https://drive.google.com/file/d/1cqT-cJgANNrhIHCrEufUYhQ4RqiWG_lJ/view?usp=drive_link"
  >>> url, filename = get_google_drive_file_info(link)
  """
- import requests # slow import
+ import requests # scoped as slow import

- file_id = link.split("/d/")[1].split("/view")[0]
+ file_id = link.split("/d/")[1].split("/view", 1)[0]
  drive_url = f"https://drive.google.com/uc?export=download&id={file_id}"
  filename = None

@@ -270,34 +272,34 @@ def get_google_drive_file_info(link):


  def safe_download(
- url,
- file=None,
- dir=None,
- unzip=True,
- delete=False,
- curl=False,
- retry=3,
- min_bytes=1e0,
- exist_ok=False,
- progress=True,
- ):
- """
- Downloads files from a URL, with options for retrying, unzipping, and deleting the downloaded file.
+ url: str | Path,
+ file: str | Path | None = None,
+ dir: str | Path | None = None,
+ unzip: bool = True,
+ delete: bool = False,
+ curl: bool = False,
+ retry: int = 3,
+ min_bytes: float = 1e0,
+ exist_ok: bool = False,
+ progress: bool = True,
+ ) -> Path | str:
+ """Download files from a URL with options for retrying, unzipping, and deleting the downloaded file. Enhanced with
+ robust partial download detection using Content-Length validation.

  Args:
  url (str): The URL of the file to be downloaded.
- file (str, optional): The filename of the downloaded file.
- If not provided, the file will be saved with the same name as the URL.
- dir (str | Path, optional): The directory to save the downloaded file.
- If not provided, the file will be saved in the current working directory.
- unzip (bool, optional): Whether to unzip the downloaded file. Default: True.
- delete (bool, optional): Whether to delete the downloaded file after unzipping. Default: False.
- curl (bool, optional): Whether to use curl command line tool for downloading. Default: False.
- retry (int, optional): The number of times to retry the download in case of failure. Default: 3.
+ file (str, optional): The filename of the downloaded file. If not provided, the file will be saved with the same
+ name as the URL.
+ dir (str | Path, optional): The directory to save the downloaded file. If not provided, the file will be saved
+ in the current working directory.
+ unzip (bool, optional): Whether to unzip the downloaded file.
+ delete (bool, optional): Whether to delete the downloaded file after unzipping.
+ curl (bool, optional): Whether to use curl command line tool for downloading.
+ retry (int, optional): The number of times to retry the download in case of failure.
  min_bytes (float, optional): The minimum number of bytes that the downloaded file should have, to be considered
- a successful download. Default: 1E0.
- exist_ok (bool, optional): Whether to overwrite existing contents during unzipping. Defaults to False.
- progress (bool, optional): Whether to display a progress bar during the download. Default: True.
+ a successful download.
+ exist_ok (bool, optional): Whether to overwrite existing contents during unzipping.
+ progress (bool, optional): Whether to display a progress bar during the download.

  Returns:
  (Path | str): The path to the downloaded file or extracted directory.
@@ -315,14 +317,9 @@ def safe_download(
  if "://" not in str(url) and Path(url).is_file(): # URL exists ('://' check required in Windows Python<3.10)
  f = Path(url) # filename
  elif not f.is_file(): # URL and file do not exist
- uri = (url if gdrive else clean_url(url)).replace( # cleaned and aliased url
- "https://github.com/ultralytics/assets/releases/download/v0.0.0/",
- "https://ultralytics.com/assets/", # assets alias
- )
+ uri = (url if gdrive else clean_url(url)).replace(ASSETS_URL, "https://ultralytics.com/assets") # clean
  desc = f"Downloading {uri} to '{f}'"
- LOGGER.info(f"{desc}...")
  f.parent.mkdir(parents=True, exist_ok=True) # make directory if missing
- check_disk_space(url, path=f.parent)
  curl_installed = shutil.which("curl")
  for i in range(retry + 1):
  try:
@@ -330,13 +327,15 @@ def safe_download(
  s = "sS" * (not progress) # silent
  r = subprocess.run(["curl", "-#", f"-{s}L", url, "-o", f, "--retry", "3", "-C", "-"]).returncode
  assert r == 0, f"Curl return value {r}"
+ expected_size = None # Can't get size with curl
  else: # urllib download
- method = "torch"
- if method == "torch":
- torch.hub.download_url_to_file(url, f, progress=progress)
- else:
- with request.urlopen(url) as response, TQDM(
- total=int(response.getheader("Content-Length", 0)),
+ with request.urlopen(url) as response:
+ expected_size = int(response.getheader("Content-Length", 0))
+ if i == 0 and expected_size > 1048576:
+ check_disk_space(expected_size, path=f.parent)
+ buffer_size = max(8192, min(1048576, expected_size // 1000)) if expected_size else 8192
+ with TQDM(
+ total=expected_size,
  desc=desc,
  disable=not progress,
  unit="B",
@@ -344,20 +343,32 @@ def safe_download(
  unit_divisor=1024,
  ) as pbar:
  with open(f, "wb") as f_opened:
- for data in response:
+ while True:
+ data = response.read(buffer_size)
+ if not data:
+ break
  f_opened.write(data)
  pbar.update(len(data))

  if f.exists():
- if f.stat().st_size > min_bytes:
- break # success
+ file_size = f.stat().st_size
+ if file_size > min_bytes:
+ # Check if download is complete (only if we have expected_size)
+ if expected_size and file_size != expected_size:
+ LOGGER.warning(
+ f"Partial download: {file_size}/{expected_size} bytes ({file_size / expected_size * 100:.1f}%)"
+ )
+ else:
+ break # success
  f.unlink() # remove partial downloads
+ except MemoryError:
+ raise # Re-raise immediately - no point retrying if insufficient disk space
  except Exception as e:
  if i == 0 and not is_online():
- raise ConnectionError(emojis(f"❌ Download failure for {uri}. Environment is not online.")) from e
+ raise ConnectionError(emojis(f"❌ Download failure for {uri}. Environment may be offline.")) from e
  elif i >= retry:
- raise ConnectionError(emojis(f"❌ Download failure for {uri}. Retry limit reached.")) from e
- LOGGER.warning(f"Download failure, retrying {i + 1}/{retry} {uri}...")
+ raise ConnectionError(emojis(f"❌ Download failure for {uri}. Retry limit reached. {e}")) from e
+ LOGGER.warning(f"Download failure, retrying {i + 1}/{retry} {uri}... {e}")

  if unzip and f.exists() and f.suffix in {"", ".zip", ".tar", ".gz"}:
@@ -374,24 +385,28 @@ def safe_download(
  return f


- def get_github_assets(repo="ultralytics/assets", version="latest", retry=False):
- """
- Retrieve the specified version's tag and assets from a GitHub repository. If the version is not specified, the
- function fetches the latest release assets.
+ def get_github_assets(
+ repo: str = "ultralytics/assets",
+ version: str = "latest",
+ retry: bool = False,
+ ) -> tuple[str, list[str]]:
+ """Retrieve the specified version's tag and assets from a GitHub repository.
+
+ If the version is not specified, the function fetches the latest release assets.

  Args:
- repo (str, optional): The GitHub repository in the format 'owner/repo'. Defaults to 'ultralytics/assets'.
- version (str, optional): The release version to fetch assets from. Defaults to 'latest'.
- retry (bool, optional): Flag to retry the request in case of a failure. Defaults to False.
+ repo (str, optional): The GitHub repository in the format 'owner/repo'.
+ version (str, optional): The release version to fetch assets from.
+ retry (bool, optional): Flag to retry the request in case of a failure.

  Returns:
- (str): The release tag.
- (List[str]): A list of asset names.
+ tag (str): The release tag.
+ assets (list[str]): A list of asset names.

  Examples:
  >>> tag, assets = get_github_assets(repo="ultralytics/assets", version="latest")
  """
- import requests # slow import
+ import requests # scoped as slow import

  if version != "latest":
  version = f"tags/{version}" # i.e. tags/v6.2
@@ -406,14 +421,18 @@ def get_github_assets(repo="ultralytics/assets", version="latest", retry=False):
  return data["tag_name"], [x["name"] for x in data["assets"]] # tag, assets i.e. ['yolo11n.pt', 'yolov8s.pt', ...]


- def attempt_download_asset(file, repo="ultralytics/assets", release="v8.3.0", **kwargs):
- """
- Attempt to download a file from GitHub release assets if it is not found locally.
+ def attempt_download_asset(
+ file: str | Path,
+ repo: str = "ultralytics/assets",
+ release: str = "v8.3.0",
+ **kwargs,
+ ) -> str:
+ """Attempt to download a file from GitHub release assets if it is not found locally.

  Args:
  file (str | Path): The filename or file path to be downloaded.
- repo (str, optional): The GitHub repository in the format 'owner/repo'. Defaults to 'ultralytics/assets'.
- release (str, optional): The specific release version to be downloaded. Defaults to 'v8.3.0'.
+ repo (str, optional): The GitHub repository in the format 'owner/repo'.
+ release (str, optional): The specific release version to be downloaded.
  **kwargs (Any): Additional keyword arguments for the download process.

  Returns:
@@ -457,27 +476,38 @@ def attempt_download_asset(file, repo="ultralytics/assets", release="v8.3.0", **
  return str(file)


- def download(url, dir=Path.cwd(), unzip=True, delete=False, curl=False, threads=1, retry=3, exist_ok=False):
- """
- Downloads files from specified URLs to a given directory. Supports concurrent downloads if multiple threads are
- specified.
+ def download(
+ url: str | list[str] | Path,
+ dir: Path = Path.cwd(),
+ unzip: bool = True,
+ delete: bool = False,
+ curl: bool = False,
+ threads: int = 1,
+ retry: int = 3,
+ exist_ok: bool = False,
+ ) -> None:
+ """Download files from specified URLs to a given directory.
+
+ Supports concurrent downloads if multiple threads are specified.

  Args:
- url (str | List[str]): The URL or list of URLs of the files to be downloaded.
- dir (Path, optional): The directory where the files will be saved. Defaults to the current working directory.
- unzip (bool, optional): Flag to unzip the files after downloading. Defaults to True.
- delete (bool, optional): Flag to delete the zip files after extraction. Defaults to False.
- curl (bool, optional): Flag to use curl for downloading. Defaults to False.
- threads (int, optional): Number of threads to use for concurrent downloads. Defaults to 1.
- retry (int, optional): Number of retries in case of download failure. Defaults to 3.
- exist_ok (bool, optional): Whether to overwrite existing contents during unzipping. Defaults to False.
+ url (str | list[str]): The URL or list of URLs of the files to be downloaded.
+ dir (Path, optional): The directory where the files will be saved.
+ unzip (bool, optional): Flag to unzip the files after downloading.
+ delete (bool, optional): Flag to delete the zip files after extraction.
+ curl (bool, optional): Flag to use curl for downloading.
+ threads (int, optional): Number of threads to use for concurrent downloads.
+ retry (int, optional): Number of retries in case of download failure.
+ exist_ok (bool, optional): Whether to overwrite existing contents during unzipping.

  Examples:
  >>> download("https://ultralytics.com/assets/example.zip", dir="path/to/dir", unzip=True)
  """
  dir = Path(dir)
  dir.mkdir(parents=True, exist_ok=True) # make directory
+ urls = [url] if isinstance(url, (str, Path)) else url
  if threads > 1:
+ LOGGER.info(f"Downloading {len(urls)} file(s) with {threads} threads to {dir}...")
  with ThreadPool(threads) as pool:
  pool.map(
  lambda x: safe_download(
@@ -488,12 +518,12 @@ def download(url, dir=Path.cwd(), unzip=True, delete=False, curl=False, threads=
  curl=curl,
  retry=retry,
  exist_ok=exist_ok,
- progress=threads <= 1,
+ progress=True,
  ),
- zip(url, repeat(dir)),
+ zip(urls, repeat(dir)),
  )
  pool.close()
  pool.join()
  else:
- for u in [url] if isinstance(url, (str, Path)) else url:
+ for u in urls:
  safe_download(url=u, dir=dir, unzip=unzip, delete=delete, curl=curl, retry=retry, exist_ok=exist_ok)
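download() now normalises a single URL into a list up front, logs the thread count, and keeps progress bars enabled even when a thread pool is used. A short usage sketch consistent with the signatures shown above (destination paths are illustrative):

    from ultralytics.utils.downloads import download, safe_download

    # Single archive: fetch, auto-unzip (.zip suffix), keep the archive on disk
    safe_download("https://ultralytics.com/assets/coco8.zip", dir="datasets", unzip=True)

    # Several files with a small thread pool; per the change above, progress bars stay on
    urls = [
        "https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11n.pt",
        "https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11s.pt",
    ]
    download(urls, dir="weights", threads=2)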
ultralytics/utils/errors.py
@@ -4,11 +4,10 @@ from ultralytics.utils import emojis


  class HUBModelError(Exception):
- """
- Exception raised when a model cannot be found or retrieved from Ultralytics HUB.
+ """Exception raised when a model cannot be found or retrieved from Ultralytics HUB.

- This custom exception is used specifically for handling errors related to model fetching in Ultralytics YOLO.
- The error message is processed to include emojis for better user experience.
+ This custom exception is used specifically for handling errors related to model fetching in Ultralytics YOLO. The
+ error message is processed to include emojis for better user experience.

  Attributes:
  message (str): The error message displayed when the exception is raised.
@@ -18,18 +17,17 @@ class HUBModelError(Exception):

  Examples:
  >>> try:
- >>> # Code that might fail to find a model
- >>> raise HUBModelError("Custom model not found message")
- >>> except HUBModelError as e:
- >>> print(e) # Displays the emoji-enhanced error message
+ ... # Code that might fail to find a model
+ ... raise HUBModelError("Custom model not found message")
+ ... except HUBModelError as e:
+ ... print(e) # Displays the emoji-enhanced error message
  """

- def __init__(self, message="Model not found. Please check model URL and try again."):
- """
- Initialize a HUBModelError exception.
+ def __init__(self, message: str = "Model not found. Please check model URL and try again."):
+ """Initialize a HUBModelError exception.

- This exception is raised when a requested model is not found or cannot be retrieved from Ultralytics HUB.
- The message is processed to include emojis for better user experience.
+ This exception is raised when a requested model is not found or cannot be retrieved from Ultralytics HUB. The
+ message is processed to include emojis for better user experience.

  Args:
  message (str, optional): The error message to display when the exception is raised.