dgenerate-ultralytics-headless 8.3.137__py3-none-any.whl → 8.3.224__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (215)
  1. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.224.dist-info}/METADATA +41 -34
  2. dgenerate_ultralytics_headless-8.3.224.dist-info/RECORD +285 -0
  3. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.224.dist-info}/WHEEL +1 -1
  4. tests/__init__.py +7 -6
  5. tests/conftest.py +15 -39
  6. tests/test_cli.py +17 -17
  7. tests/test_cuda.py +17 -8
  8. tests/test_engine.py +36 -10
  9. tests/test_exports.py +98 -37
  10. tests/test_integrations.py +12 -15
  11. tests/test_python.py +126 -82
  12. tests/test_solutions.py +319 -135
  13. ultralytics/__init__.py +27 -9
  14. ultralytics/cfg/__init__.py +83 -87
  15. ultralytics/cfg/datasets/Argoverse.yaml +4 -4
  16. ultralytics/cfg/datasets/DOTAv1.5.yaml +2 -2
  17. ultralytics/cfg/datasets/DOTAv1.yaml +2 -2
  18. ultralytics/cfg/datasets/GlobalWheat2020.yaml +2 -2
  19. ultralytics/cfg/datasets/HomeObjects-3K.yaml +4 -5
  20. ultralytics/cfg/datasets/ImageNet.yaml +3 -3
  21. ultralytics/cfg/datasets/Objects365.yaml +24 -20
  22. ultralytics/cfg/datasets/SKU-110K.yaml +9 -9
  23. ultralytics/cfg/datasets/VOC.yaml +10 -13
  24. ultralytics/cfg/datasets/VisDrone.yaml +43 -33
  25. ultralytics/cfg/datasets/african-wildlife.yaml +5 -5
  26. ultralytics/cfg/datasets/brain-tumor.yaml +4 -5
  27. ultralytics/cfg/datasets/carparts-seg.yaml +5 -5
  28. ultralytics/cfg/datasets/coco-pose.yaml +26 -4
  29. ultralytics/cfg/datasets/coco.yaml +4 -4
  30. ultralytics/cfg/datasets/coco128-seg.yaml +2 -2
  31. ultralytics/cfg/datasets/coco128.yaml +2 -2
  32. ultralytics/cfg/datasets/coco8-grayscale.yaml +103 -0
  33. ultralytics/cfg/datasets/coco8-multispectral.yaml +2 -2
  34. ultralytics/cfg/datasets/coco8-pose.yaml +23 -2
  35. ultralytics/cfg/datasets/coco8-seg.yaml +2 -2
  36. ultralytics/cfg/datasets/coco8.yaml +2 -2
  37. ultralytics/cfg/datasets/construction-ppe.yaml +32 -0
  38. ultralytics/cfg/datasets/crack-seg.yaml +5 -5
  39. ultralytics/cfg/datasets/dog-pose.yaml +32 -4
  40. ultralytics/cfg/datasets/dota8-multispectral.yaml +2 -2
  41. ultralytics/cfg/datasets/dota8.yaml +2 -2
  42. ultralytics/cfg/datasets/hand-keypoints.yaml +29 -4
  43. ultralytics/cfg/datasets/lvis.yaml +9 -9
  44. ultralytics/cfg/datasets/medical-pills.yaml +4 -5
  45. ultralytics/cfg/datasets/open-images-v7.yaml +7 -10
  46. ultralytics/cfg/datasets/package-seg.yaml +5 -5
  47. ultralytics/cfg/datasets/signature.yaml +4 -4
  48. ultralytics/cfg/datasets/tiger-pose.yaml +20 -4
  49. ultralytics/cfg/datasets/xView.yaml +5 -5
  50. ultralytics/cfg/default.yaml +96 -93
  51. ultralytics/cfg/trackers/botsort.yaml +16 -17
  52. ultralytics/cfg/trackers/bytetrack.yaml +9 -11
  53. ultralytics/data/__init__.py +4 -4
  54. ultralytics/data/annotator.py +12 -12
  55. ultralytics/data/augment.py +531 -564
  56. ultralytics/data/base.py +76 -81
  57. ultralytics/data/build.py +206 -42
  58. ultralytics/data/converter.py +179 -78
  59. ultralytics/data/dataset.py +121 -121
  60. ultralytics/data/loaders.py +114 -91
  61. ultralytics/data/split.py +28 -15
  62. ultralytics/data/split_dota.py +67 -48
  63. ultralytics/data/utils.py +110 -89
  64. ultralytics/engine/exporter.py +422 -460
  65. ultralytics/engine/model.py +224 -252
  66. ultralytics/engine/predictor.py +94 -89
  67. ultralytics/engine/results.py +345 -595
  68. ultralytics/engine/trainer.py +231 -134
  69. ultralytics/engine/tuner.py +279 -73
  70. ultralytics/engine/validator.py +53 -46
  71. ultralytics/hub/__init__.py +26 -28
  72. ultralytics/hub/auth.py +30 -16
  73. ultralytics/hub/google/__init__.py +34 -36
  74. ultralytics/hub/session.py +53 -77
  75. ultralytics/hub/utils.py +23 -109
  76. ultralytics/models/__init__.py +1 -1
  77. ultralytics/models/fastsam/__init__.py +1 -1
  78. ultralytics/models/fastsam/model.py +36 -18
  79. ultralytics/models/fastsam/predict.py +33 -44
  80. ultralytics/models/fastsam/utils.py +4 -5
  81. ultralytics/models/fastsam/val.py +12 -14
  82. ultralytics/models/nas/__init__.py +1 -1
  83. ultralytics/models/nas/model.py +16 -20
  84. ultralytics/models/nas/predict.py +12 -14
  85. ultralytics/models/nas/val.py +4 -5
  86. ultralytics/models/rtdetr/__init__.py +1 -1
  87. ultralytics/models/rtdetr/model.py +9 -9
  88. ultralytics/models/rtdetr/predict.py +22 -17
  89. ultralytics/models/rtdetr/train.py +20 -16
  90. ultralytics/models/rtdetr/val.py +79 -59
  91. ultralytics/models/sam/__init__.py +8 -2
  92. ultralytics/models/sam/amg.py +53 -38
  93. ultralytics/models/sam/build.py +29 -31
  94. ultralytics/models/sam/model.py +33 -38
  95. ultralytics/models/sam/modules/blocks.py +159 -182
  96. ultralytics/models/sam/modules/decoders.py +38 -47
  97. ultralytics/models/sam/modules/encoders.py +114 -133
  98. ultralytics/models/sam/modules/memory_attention.py +38 -31
  99. ultralytics/models/sam/modules/sam.py +114 -93
  100. ultralytics/models/sam/modules/tiny_encoder.py +268 -291
  101. ultralytics/models/sam/modules/transformer.py +59 -66
  102. ultralytics/models/sam/modules/utils.py +55 -72
  103. ultralytics/models/sam/predict.py +745 -341
  104. ultralytics/models/utils/loss.py +118 -107
  105. ultralytics/models/utils/ops.py +118 -71
  106. ultralytics/models/yolo/__init__.py +1 -1
  107. ultralytics/models/yolo/classify/predict.py +28 -26
  108. ultralytics/models/yolo/classify/train.py +50 -81
  109. ultralytics/models/yolo/classify/val.py +68 -61
  110. ultralytics/models/yolo/detect/predict.py +12 -15
  111. ultralytics/models/yolo/detect/train.py +56 -46
  112. ultralytics/models/yolo/detect/val.py +279 -223
  113. ultralytics/models/yolo/model.py +167 -86
  114. ultralytics/models/yolo/obb/predict.py +7 -11
  115. ultralytics/models/yolo/obb/train.py +23 -25
  116. ultralytics/models/yolo/obb/val.py +107 -99
  117. ultralytics/models/yolo/pose/__init__.py +1 -1
  118. ultralytics/models/yolo/pose/predict.py +12 -14
  119. ultralytics/models/yolo/pose/train.py +31 -69
  120. ultralytics/models/yolo/pose/val.py +119 -254
  121. ultralytics/models/yolo/segment/predict.py +21 -25
  122. ultralytics/models/yolo/segment/train.py +12 -66
  123. ultralytics/models/yolo/segment/val.py +126 -305
  124. ultralytics/models/yolo/world/train.py +53 -45
  125. ultralytics/models/yolo/world/train_world.py +51 -32
  126. ultralytics/models/yolo/yoloe/__init__.py +7 -7
  127. ultralytics/models/yolo/yoloe/predict.py +30 -37
  128. ultralytics/models/yolo/yoloe/train.py +89 -71
  129. ultralytics/models/yolo/yoloe/train_seg.py +15 -17
  130. ultralytics/models/yolo/yoloe/val.py +56 -41
  131. ultralytics/nn/__init__.py +9 -11
  132. ultralytics/nn/autobackend.py +179 -107
  133. ultralytics/nn/modules/__init__.py +67 -67
  134. ultralytics/nn/modules/activation.py +8 -7
  135. ultralytics/nn/modules/block.py +302 -323
  136. ultralytics/nn/modules/conv.py +61 -104
  137. ultralytics/nn/modules/head.py +488 -186
  138. ultralytics/nn/modules/transformer.py +183 -123
  139. ultralytics/nn/modules/utils.py +15 -20
  140. ultralytics/nn/tasks.py +327 -203
  141. ultralytics/nn/text_model.py +81 -65
  142. ultralytics/py.typed +1 -0
  143. ultralytics/solutions/__init__.py +12 -12
  144. ultralytics/solutions/ai_gym.py +19 -27
  145. ultralytics/solutions/analytics.py +36 -26
  146. ultralytics/solutions/config.py +29 -28
  147. ultralytics/solutions/distance_calculation.py +23 -24
  148. ultralytics/solutions/heatmap.py +17 -19
  149. ultralytics/solutions/instance_segmentation.py +21 -19
  150. ultralytics/solutions/object_blurrer.py +16 -17
  151. ultralytics/solutions/object_counter.py +48 -53
  152. ultralytics/solutions/object_cropper.py +22 -16
  153. ultralytics/solutions/parking_management.py +61 -58
  154. ultralytics/solutions/queue_management.py +19 -19
  155. ultralytics/solutions/region_counter.py +63 -50
  156. ultralytics/solutions/security_alarm.py +22 -25
  157. ultralytics/solutions/similarity_search.py +107 -60
  158. ultralytics/solutions/solutions.py +343 -262
  159. ultralytics/solutions/speed_estimation.py +35 -31
  160. ultralytics/solutions/streamlit_inference.py +104 -40
  161. ultralytics/solutions/templates/similarity-search.html +31 -24
  162. ultralytics/solutions/trackzone.py +24 -24
  163. ultralytics/solutions/vision_eye.py +11 -12
  164. ultralytics/trackers/__init__.py +1 -1
  165. ultralytics/trackers/basetrack.py +18 -27
  166. ultralytics/trackers/bot_sort.py +48 -39
  167. ultralytics/trackers/byte_tracker.py +94 -94
  168. ultralytics/trackers/track.py +7 -16
  169. ultralytics/trackers/utils/gmc.py +37 -69
  170. ultralytics/trackers/utils/kalman_filter.py +68 -76
  171. ultralytics/trackers/utils/matching.py +13 -17
  172. ultralytics/utils/__init__.py +251 -275
  173. ultralytics/utils/autobatch.py +19 -7
  174. ultralytics/utils/autodevice.py +68 -38
  175. ultralytics/utils/benchmarks.py +169 -130
  176. ultralytics/utils/callbacks/base.py +12 -13
  177. ultralytics/utils/callbacks/clearml.py +14 -15
  178. ultralytics/utils/callbacks/comet.py +139 -66
  179. ultralytics/utils/callbacks/dvc.py +19 -27
  180. ultralytics/utils/callbacks/hub.py +8 -6
  181. ultralytics/utils/callbacks/mlflow.py +6 -10
  182. ultralytics/utils/callbacks/neptune.py +11 -19
  183. ultralytics/utils/callbacks/platform.py +73 -0
  184. ultralytics/utils/callbacks/raytune.py +3 -4
  185. ultralytics/utils/callbacks/tensorboard.py +9 -12
  186. ultralytics/utils/callbacks/wb.py +33 -30
  187. ultralytics/utils/checks.py +163 -114
  188. ultralytics/utils/cpu.py +89 -0
  189. ultralytics/utils/dist.py +24 -20
  190. ultralytics/utils/downloads.py +176 -146
  191. ultralytics/utils/errors.py +11 -13
  192. ultralytics/utils/events.py +113 -0
  193. ultralytics/utils/export/__init__.py +7 -0
  194. ultralytics/utils/{export.py → export/engine.py} +81 -63
  195. ultralytics/utils/export/imx.py +294 -0
  196. ultralytics/utils/export/tensorflow.py +217 -0
  197. ultralytics/utils/files.py +33 -36
  198. ultralytics/utils/git.py +137 -0
  199. ultralytics/utils/instance.py +105 -120
  200. ultralytics/utils/logger.py +404 -0
  201. ultralytics/utils/loss.py +99 -61
  202. ultralytics/utils/metrics.py +649 -478
  203. ultralytics/utils/nms.py +337 -0
  204. ultralytics/utils/ops.py +263 -451
  205. ultralytics/utils/patches.py +70 -31
  206. ultralytics/utils/plotting.py +253 -223
  207. ultralytics/utils/tal.py +48 -61
  208. ultralytics/utils/torch_utils.py +244 -251
  209. ultralytics/utils/tqdm.py +438 -0
  210. ultralytics/utils/triton.py +22 -23
  211. ultralytics/utils/tuner.py +11 -10
  212. dgenerate_ultralytics_headless-8.3.137.dist-info/RECORD +0 -272
  213. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.224.dist-info}/entry_points.txt +0 -0
  214. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.224.dist-info}/licenses/LICENSE +0 -0
  215. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.224.dist-info}/top_level.txt +0 -0
ultralytics/utils/tqdm.py (new file)
@@ -0,0 +1,438 @@
+ # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+ from __future__ import annotations
+
+ import os
+ import sys
+ import time
+ from functools import lru_cache
+ from typing import IO, Any
+
+
+ @lru_cache(maxsize=1)
+ def is_noninteractive_console() -> bool:
+     """Check for known non-interactive console environments."""
+     return "GITHUB_ACTIONS" in os.environ or "RUNPOD_POD_ID" in os.environ
+
+
+ class TQDM:
+     """Lightweight zero-dependency progress bar for Ultralytics.
+
+     Provides clean, rich-style progress bars suitable for various environments including Weights & Biases, console
+     outputs, and other logging systems. Features zero external dependencies, clean single-line output, rich-style
+     progress bars with Unicode block characters, context manager support, iterator protocol support, and dynamic
+     description updates.
+
+     Attributes:
+         iterable (object): Iterable to wrap with progress bar.
+         desc (str): Prefix description for the progress bar.
+         total (int): Expected number of iterations.
+         disable (bool): Whether to disable the progress bar.
+         unit (str): String for units of iteration.
+         unit_scale (bool): Auto-scale units flag.
+         unit_divisor (int): Divisor for unit scaling.
+         leave (bool): Whether to leave the progress bar after completion.
+         mininterval (float): Minimum time interval between updates.
+         initial (int): Initial counter value.
+         n (int): Current iteration count.
+         closed (bool): Whether the progress bar is closed.
+         bar_format (str): Custom bar format string.
+         file (object): Output file stream.
+
+     Methods:
+         update: Update progress by n steps.
+         set_description: Set or update the description.
+         set_postfix: Set postfix for the progress bar.
+         close: Close the progress bar and clean up.
+         refresh: Refresh the progress bar display.
+         clear: Clear the progress bar from display.
+         write: Write a message without breaking the progress bar.
+
+     Examples:
+         Basic usage with iterator:
+         >>> for i in TQDM(range(100)):
+         ...     time.sleep(0.01)
+
+         With custom description:
+         >>> pbar = TQDM(range(100), desc="Processing")
+         >>> for i in pbar:
+         ...     pbar.set_description(f"Processing item {i}")
+
+         Context manager usage:
+         >>> with TQDM(total=100, unit="B", unit_scale=True) as pbar:
+         ...     for i in range(100):
+         ...         pbar.update(1)
+
+         Manual updates:
+         >>> pbar = TQDM(total=100, desc="Training")
+         >>> for epoch in range(100):
+         ...     # Do work
+         ...     pbar.update(1)
+         >>> pbar.close()
+     """
+
+     # Constants
+     MIN_RATE_CALC_INTERVAL = 0.01  # Minimum time interval for rate calculation
+     RATE_SMOOTHING_FACTOR = 0.3  # Factor for exponential smoothing of rates
+     MAX_SMOOTHED_RATE = 1000000  # Maximum rate to apply smoothing to
+     NONINTERACTIVE_MIN_INTERVAL = 60.0  # Minimum interval for non-interactive environments
+
+     def __init__(
+         self,
+         iterable: Any = None,
+         desc: str | None = None,
+         total: int | None = None,
+         leave: bool = True,
+         file: IO[str] | None = None,
+         mininterval: float = 0.1,
+         disable: bool | None = None,
+         unit: str = "it",
+         unit_scale: bool = True,
+         unit_divisor: int = 1000,
+         bar_format: str | None = None,  # kept for API compatibility; not used for formatting
+         initial: int = 0,
+         **kwargs,
+     ) -> None:
+         """Initialize the TQDM progress bar with specified configuration options.
+
+         Args:
+             iterable (object, optional): Iterable to wrap with progress bar.
+             desc (str, optional): Prefix description for the progress bar.
+             total (int, optional): Expected number of iterations.
+             leave (bool, optional): Whether to leave the progress bar after completion.
+             file (object, optional): Output file stream for progress display.
+             mininterval (float, optional): Minimum time interval between updates (default 0.1s, 60s in GitHub Actions).
+             disable (bool, optional): Whether to disable the progress bar. Auto-detected if None.
+             unit (str, optional): String for units of iteration (default "it" for items).
+             unit_scale (bool, optional): Auto-scale units for bytes/data units.
+             unit_divisor (int, optional): Divisor for unit scaling (default 1000).
+             bar_format (str, optional): Custom bar format string.
+             initial (int, optional): Initial counter value.
+             **kwargs (Any): Additional keyword arguments for compatibility (ignored).
+
+         Examples:
+             >>> pbar = TQDM(range(100), desc="Processing")
+             >>> with TQDM(total=1000, unit="B", unit_scale=True) as pbar:
+             ...     pbar.update(1024)  # Updates by 1KB
+         """
+         # Disable if not verbose
+         if disable is None:
+             try:
+                 from ultralytics.utils import LOGGER, VERBOSE
+
+                 disable = not VERBOSE or LOGGER.getEffectiveLevel() > 20
+             except ImportError:
+                 disable = False
+
+         self.iterable = iterable
+         self.desc = desc or ""
+         self.total = total or (len(iterable) if hasattr(iterable, "__len__") else None) or None  # prevent total=0
+         self.disable = disable
+         self.unit = unit
+         self.unit_scale = unit_scale
+         self.unit_divisor = unit_divisor
+         self.leave = leave
+         self.noninteractive = is_noninteractive_console()
+         self.mininterval = max(mininterval, self.NONINTERACTIVE_MIN_INTERVAL) if self.noninteractive else mininterval
+         self.initial = initial
+
+         # Kept for API compatibility (unused for f-string formatting)
+         self.bar_format = bar_format
+
+         self.file = file or sys.stdout
+
+         # Internal state
+         self.n = self.initial
+         self.last_print_n = self.initial
+         self.last_print_t = time.time()
+         self.start_t = time.time()
+         self.last_rate = 0.0
+         self.closed = False
+         self.is_bytes = unit_scale and unit in ("B", "bytes")
+         self.scales = (
+             [(1073741824, "GB/s"), (1048576, "MB/s"), (1024, "KB/s")]
+             if self.is_bytes
+             else [(1e9, f"G{self.unit}/s"), (1e6, f"M{self.unit}/s"), (1e3, f"K{self.unit}/s")]
+         )
+
+         if not self.disable and self.total and not self.noninteractive:
+             self._display()
+
+     def _format_rate(self, rate: float) -> str:
+         """Format rate with units."""
+         if rate <= 0:
+             return ""
+         fallback = f"{rate:.1f}B/s" if self.is_bytes else f"{rate:.1f}{self.unit}/s"
+         return next((f"{rate / t:.1f}{u}" for t, u in self.scales if rate >= t), fallback)
+
+     def _format_num(self, num: int | float) -> str:
+         """Format number with optional unit scaling."""
+         if not self.unit_scale or not self.is_bytes:
+             return str(num)
+
+         for unit in ("", "K", "M", "G", "T"):
+             if abs(num) < self.unit_divisor:
+                 return f"{num:3.1f}{unit}B" if unit else f"{num:.0f}B"
+             num /= self.unit_divisor
+         return f"{num:.1f}PB"
+
+     def _format_time(self, seconds: float) -> str:
+         """Format time duration."""
+         if seconds < 60:
+             return f"{seconds:.1f}s"
+         elif seconds < 3600:
+             return f"{int(seconds // 60)}:{seconds % 60:02.0f}"
+         else:
+             h, m = int(seconds // 3600), int((seconds % 3600) // 60)
+             return f"{h}:{m:02d}:{seconds % 60:02.0f}"
+
+     def _generate_bar(self, width: int = 12) -> str:
+         """Generate progress bar."""
+         if self.total is None:
+             return "━" * width if self.closed else "─" * width
+
+         frac = min(1.0, self.n / self.total)
+         filled = int(frac * width)
+         bar = "━" * filled + "─" * (width - filled)
+         if filled < width and frac * width - filled > 0.5:
+             bar = f"{bar[:filled]}╸{bar[filled + 1 :]}"
+         return bar
+
+     def _should_update(self, dt: float, dn: int) -> bool:
+         """Check if display should update."""
+         if self.noninteractive:
+             return False
+         return (self.total is not None and self.n >= self.total) or (dt >= self.mininterval)
+
+     def _display(self, final: bool = False) -> None:
+         """Display progress bar."""
+         if self.disable or (self.closed and not final):
+             return
+
+         current_time = time.time()
+         dt = current_time - self.last_print_t
+         dn = self.n - self.last_print_n
+
+         if not final and not self._should_update(dt, dn):
+             return
+
+         # Calculate rate (avoid crazy numbers)
+         if dt > self.MIN_RATE_CALC_INTERVAL:
+             rate = dn / dt if dt else 0.0
+             # Smooth rate for reasonable values, use raw rate for very high values
+             if rate < self.MAX_SMOOTHED_RATE:
+                 self.last_rate = self.RATE_SMOOTHING_FACTOR * rate + (1 - self.RATE_SMOOTHING_FACTOR) * self.last_rate
+                 rate = self.last_rate
+         else:
+             rate = self.last_rate
+
+         # At completion, use overall rate
+         if self.total and self.n >= self.total:
+             overall_elapsed = current_time - self.start_t
+             if overall_elapsed > 0:
+                 rate = self.n / overall_elapsed
+
+         # Update counters
+         self.last_print_n = self.n
+         self.last_print_t = current_time
+         elapsed = current_time - self.start_t
+
+         # Remaining time
+         remaining_str = ""
+         if self.total and 0 < self.n < self.total and elapsed > 0:
+             est_rate = rate or (self.n / elapsed)
+             remaining_str = f"<{self._format_time((self.total - self.n) / est_rate)}"
+
+         # Numbers and percent
+         if self.total:
+             percent = (self.n / self.total) * 100
+             n_str = self._format_num(self.n)
+             t_str = self._format_num(self.total)
+             if self.is_bytes:
+                 # Collapse suffix only when identical (e.g. "5.4/5.4MB")
+                 if n_str[-2] == t_str[-2]:
+                     n_str = n_str.rstrip("KMGTPB")  # Remove unit suffix from current if different than total
+         else:
+             percent = 0.0
+             n_str, t_str = self._format_num(self.n), "?"
+
+         elapsed_str = self._format_time(elapsed)
+         rate_str = self._format_rate(rate) or (self._format_rate(self.n / elapsed) if elapsed > 0 else "")
+
+         bar = self._generate_bar()
+
+         # Compose progress line via f-strings (two shapes: with/without total)
+         if self.total:
+             if self.is_bytes and self.n >= self.total:
+                 # Completed bytes: show only final size
+                 progress_str = f"{self.desc}: {percent:.0f}% {bar} {t_str} {rate_str} {elapsed_str}"
+             else:
+                 progress_str = (
+                     f"{self.desc}: {percent:.0f}% {bar} {n_str}/{t_str} {rate_str} {elapsed_str}{remaining_str}"
+                 )
+         else:
+             progress_str = f"{self.desc}: {bar} {n_str} {rate_str} {elapsed_str}"
+
+         # Write to output
+         try:
+             if self.noninteractive:
+                 # In non-interactive environments, avoid carriage return which creates empty lines
+                 self.file.write(progress_str)
+             else:
+                 # In interactive terminals, use carriage return and clear line for updating display
+                 self.file.write(f"\r\033[K{progress_str}")
+             self.file.flush()
+         except Exception:
+             pass
+
+     def update(self, n: int = 1) -> None:
+         """Update progress by n steps."""
+         if not self.disable and not self.closed:
+             self.n += n
+             self._display()
+
+     def set_description(self, desc: str | None) -> None:
+         """Set description."""
+         self.desc = desc or ""
+         if not self.disable:
+             self._display()
+
+     def set_postfix(self, **kwargs: Any) -> None:
+         """Set postfix (appends to description)."""
+         if kwargs:
+             postfix = ", ".join(f"{k}={v}" for k, v in kwargs.items())
+             base_desc = self.desc.split(" | ")[0] if " | " in self.desc else self.desc
+             self.set_description(f"{base_desc} | {postfix}")
+
+     def close(self) -> None:
+         """Close progress bar."""
+         if self.closed:
+             return
+
+         self.closed = True
+
+         if not self.disable:
+             # Final display
+             if self.total and self.n >= self.total:
+                 self.n = self.total
+             self._display(final=True)
+
+             # Cleanup
+             if self.leave:
+                 self.file.write("\n")
+             else:
+                 self.file.write("\r\033[K")
+
+             try:
+                 self.file.flush()
+             except Exception:
+                 pass
+
+     def __enter__(self) -> TQDM:
+         """Enter context manager."""
+         return self
+
+     def __exit__(self, *args: Any) -> None:
+         """Exit context manager and close progress bar."""
+         self.close()
+
+     def __iter__(self) -> Any:
+         """Iterate over the wrapped iterable with progress updates."""
+         if self.iterable is None:
+             raise TypeError("'NoneType' object is not iterable")
+
+         try:
+             for item in self.iterable:
+                 yield item
+                 self.update(1)
+         finally:
+             self.close()
+
+     def __del__(self) -> None:
+         """Destructor to ensure cleanup."""
+         try:
+             self.close()
+         except Exception:
+             pass
+
+     def refresh(self) -> None:
+         """Refresh display."""
+         if not self.disable:
+             self._display()
+
+     def clear(self) -> None:
+         """Clear progress bar."""
+         if not self.disable:
+             try:
+                 self.file.write("\r\033[K")
+                 self.file.flush()
+             except Exception:
+                 pass
+
+     @staticmethod
+     def write(s: str, file: IO[str] | None = None, end: str = "\n") -> None:
+         """Static method to write without breaking progress bar."""
+         file = file or sys.stdout
+         try:
+             file.write(s + end)
+             file.flush()
+         except Exception:
+             pass
+
+
+ if __name__ == "__main__":
+     import time
+
+     print("1. Basic progress bar with known total:")
+     for i in TQDM(range(3), desc="Known total"):
+         time.sleep(0.05)
+
+     print("\n2. Manual updates with known total:")
+     pbar = TQDM(total=300, desc="Manual updates", unit="files")
+     for i in range(300):
+         time.sleep(0.03)
+         pbar.update(1)
+         if i % 10 == 9:
+             pbar.set_description(f"Processing batch {i // 10 + 1}")
+     pbar.close()
+
+     print("\n3. Progress bar with unknown total:")
+     pbar = TQDM(desc="Unknown total", unit="items")
+     for i in range(25):
+         time.sleep(0.08)
+         pbar.update(1)
+         if i % 5 == 4:
+             pbar.set_postfix(processed=i + 1, status="OK")
+     pbar.close()
+
+     print("\n4. Context manager with unknown total:")
+     with TQDM(desc="Processing stream", unit="B", unit_scale=True, unit_divisor=1024) as pbar:
+         for i in range(30):
+             time.sleep(0.1)
+             pbar.update(1024 * 1024 * i)  # Simulate processing MB of data
+
+     print("\n5. Iterator with unknown length:")
+
+     def data_stream():
+         """Simulate a data stream of unknown length."""
+         import random
+
+         for i in range(random.randint(10, 20)):
+             yield f"data_chunk_{i}"
+
+     for chunk in TQDM(data_stream(), desc="Stream processing", unit="chunks"):
+         time.sleep(0.1)
+
+     print("\n6. File processing simulation (unknown size):")
+
+     def process_files():
+         """Simulate processing files of unknown count."""
+         return [f"file_{i}.txt" for i in range(18)]
+
+     pbar = TQDM(desc="Scanning files", unit="files")
+     files = process_files()
+     for i, filename in enumerate(files):
+         time.sleep(0.06)
+         pbar.update(1)
+         pbar.set_description(f"Processing {filename}")
+     pbar.close()
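
For quick reference, the new TQDM class mirrors the call shape of the external tqdm package for the common cases shown in its docstring (iterable wrapping, manual update(), byte-scaled totals). A minimal usage sketch; the loop counts and chunk sizes below are illustrative, not taken from the package:

from ultralytics.utils.tqdm import TQDM

# Wrap an iterable; the bar updates per item and closes itself when the loop ends
for _ in TQDM(range(3), desc="Epochs"):
    pass

# Byte-scaled manual updates, e.g. while streaming a download (chunk sizes are illustrative)
with TQDM(total=5 * 1024 * 1024, desc="Downloading", unit="B", unit_scale=True, unit_divisor=1024) as pbar:
    for _ in range(5):
        pbar.update(1024 * 1024)  # pretend a 1 MB chunk arrived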
ultralytics/utils/triton.py
@@ -1,17 +1,17 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

- from typing import List
+ from __future__ import annotations
+
  from urllib.parse import urlsplit

  import numpy as np


  class TritonRemoteModel:
-     """
-     Client for interacting with a remote Triton Inference Server model.
+     """Client for interacting with a remote Triton Inference Server model.

-     This class provides a convenient interface for sending inference requests to a Triton Inference Server
-     and processing the responses.
+     This class provides a convenient interface for sending inference requests to a Triton Inference Server and
+     processing the responses. Supports both HTTP and gRPC communication protocols.

      Attributes:
          endpoint (str): The name of the model on the Triton server.
@@ -19,10 +19,10 @@ class TritonRemoteModel:
          triton_client: The Triton client (either HTTP or gRPC).
          InferInput: The input class for the Triton client.
          InferRequestedOutput: The output request class for the Triton client.
-         input_formats (List[str]): The data types of the model inputs.
-         np_input_formats (List[type]): The numpy data types of the model inputs.
-         input_names (List[str]): The names of the model inputs.
-         output_names (List[str]): The names of the model outputs.
+         input_formats (list[str]): The data types of the model inputs.
+         np_input_formats (list[type]): The numpy data types of the model inputs.
+         input_names (list[str]): The names of the model inputs.
+         output_names (list[str]): The names of the model outputs.
          metadata: The metadata associated with the model.

      Methods:
@@ -31,21 +31,21 @@ class TritonRemoteModel:
      Examples:
          Initialize a Triton client with HTTP
          >>> model = TritonRemoteModel(url="localhost:8000", endpoint="yolov8", scheme="http")
+
          Make inference with numpy arrays
          >>> outputs = model(np.random.rand(1, 3, 640, 640).astype(np.float32))
      """

      def __init__(self, url: str, endpoint: str = "", scheme: str = ""):
-         """
-         Initialize the TritonRemoteModel for interacting with a remote Triton Inference Server.
+         """Initialize the TritonRemoteModel for interacting with a remote Triton Inference Server.

          Arguments may be provided individually or parsed from a collective 'url' argument of the form
              <scheme>://<netloc>/<endpoint>/<task_name>

          Args:
              url (str): The URL of the Triton server.
-             endpoint (str): The name of the model on the Triton server.
-             scheme (str): The communication scheme ('http' or 'grpc').
+             endpoint (str, optional): The name of the model on the Triton server.
+             scheme (str, optional): The communication scheme ('http' or 'grpc').

          Examples:
              >>> model = TritonRemoteModel(url="localhost:8000", endpoint="yolov8", scheme="http")
@@ -53,7 +53,7 @@ class TritonRemoteModel:
          """
          if not endpoint and not scheme:  # Parse all args from URL string
              splits = urlsplit(url)
-             endpoint = splits.path.strip("/").split("/")[0]
+             endpoint = splits.path.strip("/").split("/", 1)[0]
              scheme = splits.scheme
              url = splits.netloc

@@ -62,12 +62,12 @@ class TritonRemoteModel:

          # Choose the Triton client based on the communication scheme
          if scheme == "http":
-             import tritonclient.http as client  # noqa
+             import tritonclient.http as client

              self.triton_client = client.InferenceServerClient(url=self.url, verbose=False, ssl=False)
              config = self.triton_client.get_model_config(endpoint)
          else:
-             import tritonclient.grpc as client  # noqa
+             import tritonclient.grpc as client

              self.triton_client = client.InferenceServerClient(url=self.url, verbose=False, ssl=False)
              config = self.triton_client.get_model_config(endpoint, as_json=True)["config"]
@@ -85,17 +85,16 @@ class TritonRemoteModel:
          self.output_names = [x["name"] for x in config["output"]]
          self.metadata = eval(config.get("parameters", {}).get("metadata", {}).get("string_value", "None"))

-     def __call__(self, *inputs: np.ndarray) -> List[np.ndarray]:
-         """
-         Call the model with the given inputs.
+     def __call__(self, *inputs: np.ndarray) -> list[np.ndarray]:
+         """Call the model with the given inputs and return inference results.

          Args:
-             *inputs (np.ndarray): Input data to the model. Each array should match the expected shape and type
-                 for the corresponding model input.
+             *inputs (np.ndarray): Input data to the model. Each array should match the expected shape and type for the
+                 corresponding model input.

          Returns:
-             (List[np.ndarray]): Model outputs with the same dtype as the input. Each element in the list
-                 corresponds to one of the model's output tensors.
+             (list[np.ndarray]): Model outputs with the same dtype as the input. Each element in the list corresponds to
+                 one of the model's output tensors.

          Examples:
              >>> model = TritonRemoteModel(url="localhost:8000", endpoint="yolov8", scheme="http")
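
The collective-URL parsing that the split("/", 1) change touches can be sanity-checked without a running Triton server; a small sketch of the same urlsplit logic (the URL below is illustrative):

from urllib.parse import urlsplit

url = "http://localhost:8000/yolo11n"  # illustrative <scheme>://<netloc>/<endpoint> URL
splits = urlsplit(url)
scheme, netloc = splits.scheme, splits.netloc       # "http", "localhost:8000"
endpoint = splits.path.strip("/").split("/", 1)[0]  # "yolo11n"
print(scheme, netloc, endpoint)

With those three pieces resolved, TritonRemoteModel selects the HTTP or gRPC tritonclient and loads the model config as shown in the hunk above.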
ultralytics/utils/tuner.py
@@ -1,30 +1,31 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

+ from __future__ import annotations
+
  from ultralytics.cfg import TASK2DATA, TASK2METRIC, get_cfg, get_save_dir
  from ultralytics.utils import DEFAULT_CFG, DEFAULT_CFG_DICT, LOGGER, NUM_THREADS, checks, colorstr


  def run_ray_tune(
      model,
-     space: dict = None,
+     space: dict | None = None,
      grace_period: int = 10,
-     gpu_per_trial: int = None,
+     gpu_per_trial: int | None = None,
      max_samples: int = 10,
      **train_args,
  ):
-     """
-     Run hyperparameter tuning using Ray Tune.
+     """Run hyperparameter tuning using Ray Tune.

      Args:
          model (YOLO): Model to run the tuner on.
-         space (dict, optional): The hyperparameter search space.
+         space (dict, optional): The hyperparameter search space. If not provided, uses default space.
          grace_period (int, optional): The grace period in epochs of the ASHA scheduler.
          gpu_per_trial (int, optional): The number of GPUs to allocate per trial.
          max_samples (int, optional): The maximum number of trials to run.
          **train_args (Any): Additional arguments to pass to the `train()` method.

      Returns:
-         (dict): A dictionary containing the results of the hyperparameter search.
+         (ray.tune.ResultGrid): A ResultGrid containing the results of the hyperparameter search.

      Examples:
          >>> from ultralytics import YOLO
@@ -88,7 +89,7 @@ def run_ray_tune(
      model_in_store = ray.put(model)

      def _tune(config):
-         """Train the YOLO model with the specified hyperparameters."""
+         """Train the YOLO model with the specified hyperparameters and return results."""
          model_to_train = ray.get(model_in_store)  # get the model from ray store for tuning
          model_to_train.reset_callbacks()
          config.update(train_args)
@@ -98,13 +99,13 @@ def run_ray_tune(
      # Get search space
      if not space and not train_args.get("resume"):
          space = default_space
-         LOGGER.warning("search space not provided, using default search space.")
+         LOGGER.warning("Search space not provided, using default search space.")

      # Get dataset
      data = train_args.get("data", TASK2DATA[task])
      space["data"] = data
      if "data" not in train_args:
-         LOGGER.warning(f'data not provided, using default "data={data}".')
+         LOGGER.warning(f'Data not provided, using default "data={data}".')

      # Define the trainable function with allocated resources
      trainable_with_resources = tune.with_resources(_tune, {"cpu": NUM_THREADS, "gpu": gpu_per_trial or 0})
@@ -129,7 +130,7 @@ def run_ray_tune(
              {**train_args, **{"exist_ok": train_args.pop("resume", False)}},  # resume w/ same tune_dir
          ),
          name=train_args.pop("name", "tune"),  # runs/{task}/{tune_dir}
-     ).resolve()  # must be absolute dir
+     )  # must be absolute dir
      tune_dir.mkdir(parents=True, exist_ok=True)
      if tune.Tuner.can_restore(tune_dir):
          LOGGER.info(f"{colorstr('Tuner: ')} Resuming tuning run {tune_dir}...")