dgenerate-ultralytics-headless 8.3.137__py3-none-any.whl → 8.3.224__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Files changed (215)
  1. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.224.dist-info}/METADATA +41 -34
  2. dgenerate_ultralytics_headless-8.3.224.dist-info/RECORD +285 -0
  3. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.224.dist-info}/WHEEL +1 -1
  4. tests/__init__.py +7 -6
  5. tests/conftest.py +15 -39
  6. tests/test_cli.py +17 -17
  7. tests/test_cuda.py +17 -8
  8. tests/test_engine.py +36 -10
  9. tests/test_exports.py +98 -37
  10. tests/test_integrations.py +12 -15
  11. tests/test_python.py +126 -82
  12. tests/test_solutions.py +319 -135
  13. ultralytics/__init__.py +27 -9
  14. ultralytics/cfg/__init__.py +83 -87
  15. ultralytics/cfg/datasets/Argoverse.yaml +4 -4
  16. ultralytics/cfg/datasets/DOTAv1.5.yaml +2 -2
  17. ultralytics/cfg/datasets/DOTAv1.yaml +2 -2
  18. ultralytics/cfg/datasets/GlobalWheat2020.yaml +2 -2
  19. ultralytics/cfg/datasets/HomeObjects-3K.yaml +4 -5
  20. ultralytics/cfg/datasets/ImageNet.yaml +3 -3
  21. ultralytics/cfg/datasets/Objects365.yaml +24 -20
  22. ultralytics/cfg/datasets/SKU-110K.yaml +9 -9
  23. ultralytics/cfg/datasets/VOC.yaml +10 -13
  24. ultralytics/cfg/datasets/VisDrone.yaml +43 -33
  25. ultralytics/cfg/datasets/african-wildlife.yaml +5 -5
  26. ultralytics/cfg/datasets/brain-tumor.yaml +4 -5
  27. ultralytics/cfg/datasets/carparts-seg.yaml +5 -5
  28. ultralytics/cfg/datasets/coco-pose.yaml +26 -4
  29. ultralytics/cfg/datasets/coco.yaml +4 -4
  30. ultralytics/cfg/datasets/coco128-seg.yaml +2 -2
  31. ultralytics/cfg/datasets/coco128.yaml +2 -2
  32. ultralytics/cfg/datasets/coco8-grayscale.yaml +103 -0
  33. ultralytics/cfg/datasets/coco8-multispectral.yaml +2 -2
  34. ultralytics/cfg/datasets/coco8-pose.yaml +23 -2
  35. ultralytics/cfg/datasets/coco8-seg.yaml +2 -2
  36. ultralytics/cfg/datasets/coco8.yaml +2 -2
  37. ultralytics/cfg/datasets/construction-ppe.yaml +32 -0
  38. ultralytics/cfg/datasets/crack-seg.yaml +5 -5
  39. ultralytics/cfg/datasets/dog-pose.yaml +32 -4
  40. ultralytics/cfg/datasets/dota8-multispectral.yaml +2 -2
  41. ultralytics/cfg/datasets/dota8.yaml +2 -2
  42. ultralytics/cfg/datasets/hand-keypoints.yaml +29 -4
  43. ultralytics/cfg/datasets/lvis.yaml +9 -9
  44. ultralytics/cfg/datasets/medical-pills.yaml +4 -5
  45. ultralytics/cfg/datasets/open-images-v7.yaml +7 -10
  46. ultralytics/cfg/datasets/package-seg.yaml +5 -5
  47. ultralytics/cfg/datasets/signature.yaml +4 -4
  48. ultralytics/cfg/datasets/tiger-pose.yaml +20 -4
  49. ultralytics/cfg/datasets/xView.yaml +5 -5
  50. ultralytics/cfg/default.yaml +96 -93
  51. ultralytics/cfg/trackers/botsort.yaml +16 -17
  52. ultralytics/cfg/trackers/bytetrack.yaml +9 -11
  53. ultralytics/data/__init__.py +4 -4
  54. ultralytics/data/annotator.py +12 -12
  55. ultralytics/data/augment.py +531 -564
  56. ultralytics/data/base.py +76 -81
  57. ultralytics/data/build.py +206 -42
  58. ultralytics/data/converter.py +179 -78
  59. ultralytics/data/dataset.py +121 -121
  60. ultralytics/data/loaders.py +114 -91
  61. ultralytics/data/split.py +28 -15
  62. ultralytics/data/split_dota.py +67 -48
  63. ultralytics/data/utils.py +110 -89
  64. ultralytics/engine/exporter.py +422 -460
  65. ultralytics/engine/model.py +224 -252
  66. ultralytics/engine/predictor.py +94 -89
  67. ultralytics/engine/results.py +345 -595
  68. ultralytics/engine/trainer.py +231 -134
  69. ultralytics/engine/tuner.py +279 -73
  70. ultralytics/engine/validator.py +53 -46
  71. ultralytics/hub/__init__.py +26 -28
  72. ultralytics/hub/auth.py +30 -16
  73. ultralytics/hub/google/__init__.py +34 -36
  74. ultralytics/hub/session.py +53 -77
  75. ultralytics/hub/utils.py +23 -109
  76. ultralytics/models/__init__.py +1 -1
  77. ultralytics/models/fastsam/__init__.py +1 -1
  78. ultralytics/models/fastsam/model.py +36 -18
  79. ultralytics/models/fastsam/predict.py +33 -44
  80. ultralytics/models/fastsam/utils.py +4 -5
  81. ultralytics/models/fastsam/val.py +12 -14
  82. ultralytics/models/nas/__init__.py +1 -1
  83. ultralytics/models/nas/model.py +16 -20
  84. ultralytics/models/nas/predict.py +12 -14
  85. ultralytics/models/nas/val.py +4 -5
  86. ultralytics/models/rtdetr/__init__.py +1 -1
  87. ultralytics/models/rtdetr/model.py +9 -9
  88. ultralytics/models/rtdetr/predict.py +22 -17
  89. ultralytics/models/rtdetr/train.py +20 -16
  90. ultralytics/models/rtdetr/val.py +79 -59
  91. ultralytics/models/sam/__init__.py +8 -2
  92. ultralytics/models/sam/amg.py +53 -38
  93. ultralytics/models/sam/build.py +29 -31
  94. ultralytics/models/sam/model.py +33 -38
  95. ultralytics/models/sam/modules/blocks.py +159 -182
  96. ultralytics/models/sam/modules/decoders.py +38 -47
  97. ultralytics/models/sam/modules/encoders.py +114 -133
  98. ultralytics/models/sam/modules/memory_attention.py +38 -31
  99. ultralytics/models/sam/modules/sam.py +114 -93
  100. ultralytics/models/sam/modules/tiny_encoder.py +268 -291
  101. ultralytics/models/sam/modules/transformer.py +59 -66
  102. ultralytics/models/sam/modules/utils.py +55 -72
  103. ultralytics/models/sam/predict.py +745 -341
  104. ultralytics/models/utils/loss.py +118 -107
  105. ultralytics/models/utils/ops.py +118 -71
  106. ultralytics/models/yolo/__init__.py +1 -1
  107. ultralytics/models/yolo/classify/predict.py +28 -26
  108. ultralytics/models/yolo/classify/train.py +50 -81
  109. ultralytics/models/yolo/classify/val.py +68 -61
  110. ultralytics/models/yolo/detect/predict.py +12 -15
  111. ultralytics/models/yolo/detect/train.py +56 -46
  112. ultralytics/models/yolo/detect/val.py +279 -223
  113. ultralytics/models/yolo/model.py +167 -86
  114. ultralytics/models/yolo/obb/predict.py +7 -11
  115. ultralytics/models/yolo/obb/train.py +23 -25
  116. ultralytics/models/yolo/obb/val.py +107 -99
  117. ultralytics/models/yolo/pose/__init__.py +1 -1
  118. ultralytics/models/yolo/pose/predict.py +12 -14
  119. ultralytics/models/yolo/pose/train.py +31 -69
  120. ultralytics/models/yolo/pose/val.py +119 -254
  121. ultralytics/models/yolo/segment/predict.py +21 -25
  122. ultralytics/models/yolo/segment/train.py +12 -66
  123. ultralytics/models/yolo/segment/val.py +126 -305
  124. ultralytics/models/yolo/world/train.py +53 -45
  125. ultralytics/models/yolo/world/train_world.py +51 -32
  126. ultralytics/models/yolo/yoloe/__init__.py +7 -7
  127. ultralytics/models/yolo/yoloe/predict.py +30 -37
  128. ultralytics/models/yolo/yoloe/train.py +89 -71
  129. ultralytics/models/yolo/yoloe/train_seg.py +15 -17
  130. ultralytics/models/yolo/yoloe/val.py +56 -41
  131. ultralytics/nn/__init__.py +9 -11
  132. ultralytics/nn/autobackend.py +179 -107
  133. ultralytics/nn/modules/__init__.py +67 -67
  134. ultralytics/nn/modules/activation.py +8 -7
  135. ultralytics/nn/modules/block.py +302 -323
  136. ultralytics/nn/modules/conv.py +61 -104
  137. ultralytics/nn/modules/head.py +488 -186
  138. ultralytics/nn/modules/transformer.py +183 -123
  139. ultralytics/nn/modules/utils.py +15 -20
  140. ultralytics/nn/tasks.py +327 -203
  141. ultralytics/nn/text_model.py +81 -65
  142. ultralytics/py.typed +1 -0
  143. ultralytics/solutions/__init__.py +12 -12
  144. ultralytics/solutions/ai_gym.py +19 -27
  145. ultralytics/solutions/analytics.py +36 -26
  146. ultralytics/solutions/config.py +29 -28
  147. ultralytics/solutions/distance_calculation.py +23 -24
  148. ultralytics/solutions/heatmap.py +17 -19
  149. ultralytics/solutions/instance_segmentation.py +21 -19
  150. ultralytics/solutions/object_blurrer.py +16 -17
  151. ultralytics/solutions/object_counter.py +48 -53
  152. ultralytics/solutions/object_cropper.py +22 -16
  153. ultralytics/solutions/parking_management.py +61 -58
  154. ultralytics/solutions/queue_management.py +19 -19
  155. ultralytics/solutions/region_counter.py +63 -50
  156. ultralytics/solutions/security_alarm.py +22 -25
  157. ultralytics/solutions/similarity_search.py +107 -60
  158. ultralytics/solutions/solutions.py +343 -262
  159. ultralytics/solutions/speed_estimation.py +35 -31
  160. ultralytics/solutions/streamlit_inference.py +104 -40
  161. ultralytics/solutions/templates/similarity-search.html +31 -24
  162. ultralytics/solutions/trackzone.py +24 -24
  163. ultralytics/solutions/vision_eye.py +11 -12
  164. ultralytics/trackers/__init__.py +1 -1
  165. ultralytics/trackers/basetrack.py +18 -27
  166. ultralytics/trackers/bot_sort.py +48 -39
  167. ultralytics/trackers/byte_tracker.py +94 -94
  168. ultralytics/trackers/track.py +7 -16
  169. ultralytics/trackers/utils/gmc.py +37 -69
  170. ultralytics/trackers/utils/kalman_filter.py +68 -76
  171. ultralytics/trackers/utils/matching.py +13 -17
  172. ultralytics/utils/__init__.py +251 -275
  173. ultralytics/utils/autobatch.py +19 -7
  174. ultralytics/utils/autodevice.py +68 -38
  175. ultralytics/utils/benchmarks.py +169 -130
  176. ultralytics/utils/callbacks/base.py +12 -13
  177. ultralytics/utils/callbacks/clearml.py +14 -15
  178. ultralytics/utils/callbacks/comet.py +139 -66
  179. ultralytics/utils/callbacks/dvc.py +19 -27
  180. ultralytics/utils/callbacks/hub.py +8 -6
  181. ultralytics/utils/callbacks/mlflow.py +6 -10
  182. ultralytics/utils/callbacks/neptune.py +11 -19
  183. ultralytics/utils/callbacks/platform.py +73 -0
  184. ultralytics/utils/callbacks/raytune.py +3 -4
  185. ultralytics/utils/callbacks/tensorboard.py +9 -12
  186. ultralytics/utils/callbacks/wb.py +33 -30
  187. ultralytics/utils/checks.py +163 -114
  188. ultralytics/utils/cpu.py +89 -0
  189. ultralytics/utils/dist.py +24 -20
  190. ultralytics/utils/downloads.py +176 -146
  191. ultralytics/utils/errors.py +11 -13
  192. ultralytics/utils/events.py +113 -0
  193. ultralytics/utils/export/__init__.py +7 -0
  194. ultralytics/utils/{export.py → export/engine.py} +81 -63
  195. ultralytics/utils/export/imx.py +294 -0
  196. ultralytics/utils/export/tensorflow.py +217 -0
  197. ultralytics/utils/files.py +33 -36
  198. ultralytics/utils/git.py +137 -0
  199. ultralytics/utils/instance.py +105 -120
  200. ultralytics/utils/logger.py +404 -0
  201. ultralytics/utils/loss.py +99 -61
  202. ultralytics/utils/metrics.py +649 -478
  203. ultralytics/utils/nms.py +337 -0
  204. ultralytics/utils/ops.py +263 -451
  205. ultralytics/utils/patches.py +70 -31
  206. ultralytics/utils/plotting.py +253 -223
  207. ultralytics/utils/tal.py +48 -61
  208. ultralytics/utils/torch_utils.py +244 -251
  209. ultralytics/utils/tqdm.py +438 -0
  210. ultralytics/utils/triton.py +22 -23
  211. ultralytics/utils/tuner.py +11 -10
  212. dgenerate_ultralytics_headless-8.3.137.dist-info/RECORD +0 -272
  213. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.224.dist-info}/entry_points.txt +0 -0
  214. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.224.dist-info}/licenses/LICENSE +0 -0
  215. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.224.dist-info}/top_level.txt +0 -0
ultralytics/utils/export/tensorflow.py
@@ -0,0 +1,217 @@
+ # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+ from __future__ import annotations
+
+ from pathlib import Path
+
+ import numpy as np
+ import torch
+
+ from ultralytics.nn.modules import Detect, Pose
+ from ultralytics.utils import LOGGER
+ from ultralytics.utils.downloads import attempt_download_asset
+ from ultralytics.utils.files import spaces_in_path
+ from ultralytics.utils.tal import make_anchors
+
+
+ def tf_wrapper(model: torch.nn.Module) -> torch.nn.Module:
+     """A wrapper to add TensorFlow compatible inference methods to Detect and Pose layers."""
+     for m in model.modules():
+         if not isinstance(m, Detect):
+             continue
+         import types
+
+         m._inference = types.MethodType(_tf_inference, m)
+         if type(m) is Pose:
+             m.kpts_decode = types.MethodType(tf_kpts_decode, m)
+     return model
+
+
+ def _tf_inference(self, x: list[torch.Tensor]) -> tuple[torch.Tensor]:
+     """Decode boxes and cls scores for tf object detection."""
+     shape = x[0].shape  # BCHW
+     x_cat = torch.cat([xi.view(x[0].shape[0], self.no, -1) for xi in x], 2)
+     box, cls = x_cat.split((self.reg_max * 4, self.nc), 1)
+     if self.dynamic or self.shape != shape:
+         self.anchors, self.strides = (x.transpose(0, 1) for x in make_anchors(x, self.stride, 0.5))
+         self.shape = shape
+     grid_h, grid_w = shape[2], shape[3]
+     grid_size = torch.tensor([grid_w, grid_h, grid_w, grid_h], device=box.device).reshape(1, 4, 1)
+     norm = self.strides / (self.stride[0] * grid_size)
+     dbox = self.decode_bboxes(self.dfl(box) * norm, self.anchors.unsqueeze(0) * norm[:, :2])
+     return torch.cat((dbox, cls.sigmoid()), 1)
+
+
+ def tf_kpts_decode(self, bs: int, kpts: torch.Tensor) -> torch.Tensor:
+     """Decode keypoints for tf pose estimation."""
+     ndim = self.kpt_shape[1]
+     # required for TFLite export to avoid 'PLACEHOLDER_FOR_GREATER_OP_CODES' bug
+     # Precompute normalization factor to increase numerical stability
+     y = kpts.view(bs, *self.kpt_shape, -1)
+     grid_h, grid_w = self.shape[2], self.shape[3]
+     grid_size = torch.tensor([grid_w, grid_h], device=y.device).reshape(1, 2, 1)
+     norm = self.strides / (self.stride[0] * grid_size)
+     a = (y[:, :, :2] * 2.0 + (self.anchors - 0.5)) * norm
+     if ndim == 3:
+         a = torch.cat((a, y[:, :, 2:3].sigmoid()), 2)
+     return a.view(bs, self.nk, -1)
+
+
+ def onnx2saved_model(
+     onnx_file: str,
+     output_dir: Path,
+     int8: bool = False,
+     images: np.ndarray = None,
+     disable_group_convolution: bool = False,
+     prefix="",
+ ):
+     """Convert an ONNX model to TensorFlow SavedModel format via onnx2tf.
+
+     Args:
+         onnx_file (str): ONNX file path.
+         output_dir (Path): Output directory path for the SavedModel.
+         int8 (bool, optional): Enable INT8 quantization. Defaults to False.
+         images (np.ndarray, optional): Calibration images for INT8 quantization in BHWC format.
+         disable_group_convolution (bool, optional): Disable group convolution optimization. Defaults to False.
+         prefix (str, optional): Logging prefix. Defaults to "".
+
+     Returns:
+         (keras.Model): Converted Keras model.
+
+     Notes:
+         - Requires onnx2tf package. Downloads calibration data if INT8 quantization is enabled.
+         - Removes temporary files and renames quantized models after conversion.
+     """
+     # Pre-download calibration file to fix https://github.com/PINTO0309/onnx2tf/issues/545
+     onnx2tf_file = Path("calibration_image_sample_data_20x128x128x3_float32.npy")
+     if not onnx2tf_file.exists():
+         attempt_download_asset(f"{onnx2tf_file}.zip", unzip=True, delete=True)
+     np_data = None
+     if int8:
+         tmp_file = output_dir / "tmp_tflite_int8_calibration_images.npy"  # int8 calibration images file
+         if images is not None:
+             output_dir.mkdir()
+             np.save(str(tmp_file), images)  # BHWC
+             np_data = [["images", tmp_file, [[[[0, 0, 0]]]], [[[[255, 255, 255]]]]]]
+
+     import onnx2tf  # scoped for after ONNX export for reduced conflict during import
+
+     LOGGER.info(f"{prefix} starting TFLite export with onnx2tf {onnx2tf.__version__}...")
+     keras_model = onnx2tf.convert(
+         input_onnx_file_path=onnx_file,
+         output_folder_path=str(output_dir),
+         not_use_onnxsim=True,
+         verbosity="error",  # note INT8-FP16 activation bug https://github.com/ultralytics/ultralytics/issues/15873
+         output_integer_quantized_tflite=int8,
+         custom_input_op_name_np_data_path=np_data,
+         enable_batchmatmul_unfold=True and not int8,  # fix lower no. of detected objects on GPU delegate
+         output_signaturedefs=True,  # fix error with Attention block group convolution
+         disable_group_convolution=disable_group_convolution,  # fix error with group convolution
+     )
+
+     # Remove/rename TFLite models
+     if int8:
+         tmp_file.unlink(missing_ok=True)
+         for file in output_dir.rglob("*_dynamic_range_quant.tflite"):
+             file.rename(file.with_name(file.stem.replace("_dynamic_range_quant", "_int8") + file.suffix))
+         for file in output_dir.rglob("*_integer_quant_with_int16_act.tflite"):
+             file.unlink()  # delete extra fp16 activation TFLite files
+     return keras_model
+
+
+ def keras2pb(keras_model, file: Path, prefix=""):
+     """Convert a Keras model to TensorFlow GraphDef (.pb) format.
+
+     Args:
+         keras_model (tf_keras.Model): Keras model to convert to frozen graph format.
+         file (Path): Output file path (suffix will be changed to .pb).
+         prefix (str, optional): Logging prefix. Defaults to "".
+
+     Notes:
+         Creates a frozen graph by converting variables to constants for inference optimization.
+     """
+     import tensorflow as tf
+     from tensorflow.python.framework.convert_to_constants import convert_variables_to_constants_v2
+
+     LOGGER.info(f"\n{prefix} starting export with tensorflow {tf.__version__}...")
+     m = tf.function(lambda x: keras_model(x))  # full model
+     m = m.get_concrete_function(tf.TensorSpec(keras_model.inputs[0].shape, keras_model.inputs[0].dtype))
+     frozen_func = convert_variables_to_constants_v2(m)
+     frozen_func.graph.as_graph_def()
+     tf.io.write_graph(graph_or_graph_def=frozen_func.graph, logdir=str(file.parent), name=file.name, as_text=False)
+
+
+ def tflite2edgetpu(tflite_file: str | Path, output_dir: str | Path, prefix: str = ""):
+     """Convert a TensorFlow Lite model to Edge TPU format using the Edge TPU compiler.
+
+     Args:
+         tflite_file (str | Path): Path to the input TensorFlow Lite (.tflite) model file.
+         output_dir (str | Path): Output directory path for the compiled Edge TPU model.
+         prefix (str, optional): Logging prefix. Defaults to "".
+
+     Notes:
+         Requires the Edge TPU compiler to be installed. The function compiles the TFLite model
+         for optimal performance on Google's Edge TPU hardware accelerator.
+     """
+     import subprocess
+
+     cmd = (
+         "edgetpu_compiler "
+         f'--out_dir "{output_dir}" '
+         "--show_operations "
+         "--search_delegate "
+         "--delegate_search_step 30 "
+         "--timeout_sec 180 "
+         f'"{tflite_file}"'
+     )
+     LOGGER.info(f"{prefix} running '{cmd}'")
+     subprocess.run(cmd, shell=True)
+
+
+ def pb2tfjs(pb_file: str, output_dir: str, half: bool = False, int8: bool = False, prefix: str = ""):
+     """Convert a TensorFlow GraphDef (.pb) model to TensorFlow.js format.
+
+     Args:
+         pb_file (str): Path to the input TensorFlow GraphDef (.pb) model file.
+         output_dir (str): Output directory path for the converted TensorFlow.js model.
+         half (bool, optional): Enable FP16 quantization. Defaults to False.
+         int8 (bool, optional): Enable INT8 quantization. Defaults to False.
+         prefix (str, optional): Logging prefix. Defaults to "".
+
+     Notes:
+         Requires tensorflowjs package. Uses tensorflowjs_converter command-line tool for conversion.
+         Handles spaces in file paths and warns if output directory contains spaces.
+     """
+     import subprocess
+
+     import tensorflow as tf
+     import tensorflowjs as tfjs
+
+     LOGGER.info(f"\n{prefix} starting export with tensorflowjs {tfjs.__version__}...")
+
+     gd = tf.Graph().as_graph_def()  # TF GraphDef
+     with open(pb_file, "rb") as file:
+         gd.ParseFromString(file.read())
+     outputs = ",".join(gd_outputs(gd))
+     LOGGER.info(f"\n{prefix} output node names: {outputs}")
+
+     quantization = "--quantize_float16" if half else "--quantize_uint8" if int8 else ""
+     with spaces_in_path(pb_file) as fpb_, spaces_in_path(output_dir) as f_:  # exporter cannot handle spaces in path
+         cmd = (
+             "tensorflowjs_converter "
+             f'--input_format=tf_frozen_model {quantization} --output_node_names={outputs} "{fpb_}" "{f_}"'
+         )
+         LOGGER.info(f"{prefix} running '{cmd}'")
+         subprocess.run(cmd, shell=True)
+
+     if " " in output_dir:
+         LOGGER.warning(f"{prefix} your model may not work correctly with spaces in path '{output_dir}'.")
+
+
+ def gd_outputs(gd):
+     """Return TensorFlow GraphDef model output node names."""
+     name_list, input_list = [], []
+     for node in gd.node:  # tensorflow.core.framework.node_def_pb2.NodeDef
+         name_list.append(node.name)
+         input_list.extend(node.input)
+     return sorted(f"{x}:0" for x in list(set(name_list) - set(input_list)) if not x.startswith("NoOp"))
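For reference, the new tensorflow.py helpers chain together in the order onnx2saved_model → keras2pb → pb2tfjs. A minimal sketch of that pipeline, assuming the module path ultralytics.utils.export.tensorflow from the file list above; the yolo11n.onnx input and output paths are illustrative, not part of this diff:

from pathlib import Path

from ultralytics.utils.export.tensorflow import keras2pb, onnx2saved_model, pb2tfjs

onnx_file = "yolo11n.onnx"  # illustrative input model, exported beforehand
saved_model_dir = Path("yolo11n_saved_model")

# ONNX -> TensorFlow SavedModel (FP32 here; pass int8=True plus calibration images for INT8)
keras_model = onnx2saved_model(onnx_file, saved_model_dir)

# Keras model -> frozen GraphDef (.pb)
pb_file = saved_model_dir / "yolo11n.pb"
keras2pb(keras_model, pb_file)

# GraphDef -> TensorFlow.js web model (FP16 quantized)
pb2tfjs(str(pb_file), "yolo11n_web_model", half=True)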
ultralytics/utils/files.py
@@ -1,5 +1,7 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

+ from __future__ import annotations
+
  import contextlib
  import glob
  import os
@@ -11,11 +13,10 @@ from pathlib import Path


  class WorkingDirectory(contextlib.ContextDecorator):
-     """
-     A context manager and decorator for temporarily changing the working directory.
+     """A context manager and decorator for temporarily changing the working directory.

-     This class allows for the temporary change of the working directory using a context manager or decorator.
-     It ensures that the original working directory is restored after the context or decorated function completes.
+     This class allows for the temporary change of the working directory using a context manager or decorator. It ensures
+     that the original working directory is restored after the context or decorated function completes.

      Attributes:
          dir (Path | str): The new directory to switch to.
@@ -38,24 +39,23 @@ class WorkingDirectory(contextlib.ContextDecorator):
          >>> pass
      """

-     def __init__(self, new_dir):
-         """Sets the working directory to 'new_dir' upon instantiation for use with context managers or decorators."""
+     def __init__(self, new_dir: str | Path):
+         """Initialize the WorkingDirectory context manager with the target directory."""
          self.dir = new_dir  # new dir
          self.cwd = Path.cwd().resolve()  # current dir

      def __enter__(self):
-         """Changes the current working directory to the specified directory upon entering the context."""
+         """Change the current working directory to the specified directory upon entering the context."""
          os.chdir(self.dir)

-     def __exit__(self, exc_type, exc_val, exc_tb):  # noqa
-         """Restores the original working directory when exiting the context."""
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         """Restore the original working directory when exiting the context."""
          os.chdir(self.cwd)


  @contextmanager
- def spaces_in_path(path):
-     """
-     Context manager to handle paths with spaces in their names.
+ def spaces_in_path(path: str | Path):
+     """Context manager to handle paths with spaces in their names.

      If a path contains spaces, it replaces them with underscores, copies the file/directory to the new path, executes
      the context code block, then copies the file/directory back to its original location.
@@ -64,7 +64,7 @@ def spaces_in_path(path):
          path (str | Path): The original path that may contain spaces.

      Yields:
-         (Path | str): Temporary path with spaces replaced by underscores if spaces were present, otherwise the original path.
+         (Path | str): Temporary path with any spaces replaced by underscores.

      Examples:
          >>> with spaces_in_path('/path/with spaces') as new_path:
@@ -82,7 +82,6 @@ def spaces_in_path(path):

          # Copy file/directory
          if path.is_dir():
-             # tmp_path.mkdir(parents=True, exist_ok=True)
              shutil.copytree(path, tmp_path)
          elif path.is_file():
              tmp_path.parent.mkdir(parents=True, exist_ok=True)
@@ -104,19 +103,18 @@ def spaces_in_path(path):
          yield path


- def increment_path(path, exist_ok=False, sep="", mkdir=False):
-     """
-     Increment a file or directory path, i.e., runs/exp --> runs/exp{sep}2, runs/exp{sep}3, ... etc.
+ def increment_path(path: str | Path, exist_ok: bool = False, sep: str = "", mkdir: bool = False) -> Path:
+     """Increment a file or directory path, i.e., runs/exp --> runs/exp{sep}2, runs/exp{sep}3, ... etc.

-     If the path exists and `exist_ok` is not True, the path will be incremented by appending a number and `sep` to
-     the end of the path. If the path is a file, the file extension will be preserved. If the path is a directory, the
-     number will be appended directly to the end of the path.
+     If the path exists and `exist_ok` is not True, the path will be incremented by appending a number and `sep` to the
+     end of the path. If the path is a file, the file extension will be preserved. If the path is a directory, the number
+     will be appended directly to the end of the path.

      Args:
          path (str | Path): Path to increment.
-         exist_ok (bool): If True, the path will not be incremented and returned as-is.
-         sep (str): Separator to use between the path and the incrementation number.
-         mkdir (bool): Create a directory if it does not exist.
+         exist_ok (bool, optional): If True, the path will not be incremented and returned as-is.
+         sep (str, optional): Separator to use between the path and the incrementation number.
+         mkdir (bool, optional): Create a directory if it does not exist.

      Returns:
          (Path): Incremented path.
@@ -152,20 +150,20 @@ def increment_path(path, exist_ok=False, sep="", mkdir=False):
      return path


- def file_age(path=__file__):
+ def file_age(path: str | Path = __file__) -> int:
      """Return days since the last modification of the specified file."""
      dt = datetime.now() - datetime.fromtimestamp(Path(path).stat().st_mtime)  # delta
      return dt.days  # + dt.seconds / 86400  # fractional days


- def file_date(path=__file__):
-     """Returns the file modification date in 'YYYY-M-D' format."""
+ def file_date(path: str | Path = __file__) -> str:
+     """Return the file modification date in 'YYYY-M-D' format."""
      t = datetime.fromtimestamp(Path(path).stat().st_mtime)
      return f"{t.year}-{t.month}-{t.day}"


- def file_size(path):
-     """Returns the size of a file or directory in megabytes (MB)."""
+ def file_size(path: str | Path) -> float:
+     """Return the size of a file or directory in megabytes (MB)."""
      if isinstance(path, (str, Path)):
          mb = 1 << 20  # bytes to MiB (1024 ** 2)
          path = Path(path)
@@ -176,20 +174,19 @@ def file_size(path):
      return 0.0


- def get_latest_run(search_dir="."):
-     """Returns the path to the most recent 'last.pt' file in the specified directory for resuming training."""
+ def get_latest_run(search_dir: str = ".") -> str:
+     """Return the path to the most recent 'last.pt' file in the specified directory for resuming training."""
      last_list = glob.glob(f"{search_dir}/**/last*.pt", recursive=True)
      return max(last_list, key=os.path.getctime) if last_list else ""


- def update_models(model_names=("yolo11n.pt",), source_dir=Path("."), update_names=False):
-     """
-     Update and re-save specified YOLO models in an 'updated_models' subdirectory.
+ def update_models(model_names: tuple = ("yolo11n.pt",), source_dir: Path = Path("."), update_names: bool = False):
+     """Update and re-save specified YOLO models in an 'updated_models' subdirectory.

      Args:
-         model_names (Tuple[str, ...]): Model filenames to update.
-         source_dir (Path): Directory containing models and target subdirectory.
-         update_names (bool): Update model names from a data YAML.
+         model_names (tuple, optional): Model filenames to update.
+         source_dir (Path, optional): Directory containing models and target subdirectory.
+         update_names (bool, optional): Update model names from a data YAML.

      Examples:
          Update specified YOLO models and save them in 'updated_models' subdirectory:
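As a usage note for the retyped files.py helpers, here is a small sketch exercising increment_path and spaces_in_path; the paths are illustrative and not part of this diff:

from pathlib import Path

from ultralytics.utils.files import file_size, increment_path, spaces_in_path

# runs/exp, runs/exp2, runs/exp3, ... picks the first free name and creates it
save_dir = increment_path(Path("runs") / "exp", exist_ok=False, mkdir=True)
print(save_dir, file_size(save_dir))  # e.g. runs/exp2 0.0

# Operate on a temporary space-free copy; it is copied back when the block exits
with spaces_in_path("path/with spaces/model.onnx") as tmp_path:
    print(tmp_path)  # temporary path with spaces replaced by underscores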
ultralytics/utils/git.py
@@ -0,0 +1,137 @@
+ # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+ from __future__ import annotations
+
+ from functools import cached_property
+ from pathlib import Path
+
+
+ class GitRepo:
+     """Represent a local Git repository and expose branch, commit, and remote metadata.
+
+     This class discovers the repository root by searching for a .git entry from the given path upward, resolves the
+     actual .git directory (including worktrees), and reads Git metadata directly from on-disk files. It does not invoke
+     the git binary and therefore works in restricted environments. All metadata properties are resolved lazily and
+     cached; construct a new instance to refresh state.
+
+     Attributes:
+         root (Path | None): Repository root directory containing the .git entry; None if not in a repository.
+         gitdir (Path | None): Resolved .git directory path; handles worktrees; None if unresolved.
+         head (str | None): Raw contents of HEAD; a SHA for detached HEAD or "ref: <refname>" for branch heads.
+         is_repo (bool): Whether the provided path resides inside a Git repository.
+         branch (str | None): Current branch name when HEAD points to a branch; None for detached HEAD or non-repo.
+         commit (str | None): Current commit SHA for HEAD; None if not determinable.
+         origin (str | None): URL of the "origin" remote as read from gitdir/config; None if unset or unavailable.
+
+     Examples:
+         Initialize from the current working directory and read metadata
+         >>> from pathlib import Path
+         >>> repo = GitRepo(Path.cwd())
+         >>> repo.is_repo
+         True
+         >>> repo.branch, repo.commit[:7], repo.origin
+         ('main', '1a2b3c4', 'https://example.com/owner/repo.git')
+
+     Notes:
+         - Resolves metadata by reading files: HEAD, packed-refs, and config; no subprocess calls are used.
+         - Caches properties on first access using cached_property; recreate the object to reflect repository changes.
+     """
+
+     def __init__(self, path: Path = Path(__file__).resolve()):
+         """Initialize a Git repository context by discovering the repository root from a starting path.
+
+         Args:
+             path (Path, optional): File or directory path used as the starting point to locate the repository root.
+         """
+         self.root = self._find_root(path)
+         self.gitdir = self._gitdir(self.root) if self.root else None
+
+     @staticmethod
+     def _find_root(p: Path) -> Path | None:
+         """Return repo root or None."""
+         return next((d for d in [p, *list(p.parents)] if (d / ".git").exists()), None)
+
+     @staticmethod
+     def _gitdir(root: Path) -> Path | None:
+         """Resolve actual .git directory (handles worktrees)."""
+         g = root / ".git"
+         if g.is_dir():
+             return g
+         if g.is_file():
+             t = g.read_text(errors="ignore").strip()
+             if t.startswith("gitdir:"):
+                 return (root / t.split(":", 1)[1].strip()).resolve()
+         return None
+
+     def _read(self, p: Path | None) -> str | None:
+         """Read and strip file if exists."""
+         return p.read_text(errors="ignore").strip() if p and p.exists() else None
+
+     @cached_property
+     def head(self) -> str | None:
+         """HEAD file contents."""
+         return self._read(self.gitdir / "HEAD" if self.gitdir else None)
+
+     def _ref_commit(self, ref: str) -> str | None:
+         """Commit for ref (handles packed-refs)."""
+         rf = self.gitdir / ref
+         s = self._read(rf)
+         if s:
+             return s
+         pf = self.gitdir / "packed-refs"
+         b = pf.read_bytes().splitlines() if pf.exists() else []
+         tgt = ref.encode()
+         for line in b:
+             if line[:1] in (b"#", b"^") or b" " not in line:
+                 continue
+             sha, name = line.split(b" ", 1)
+             if name.strip() == tgt:
+                 return sha.decode()
+         return None
+
+     @property
+     def is_repo(self) -> bool:
+         """True if inside a git repo."""
+         return self.gitdir is not None
+
+     @cached_property
+     def branch(self) -> str | None:
+         """Current branch or None."""
+         if not self.is_repo or not self.head or not self.head.startswith("ref: "):
+             return None
+         ref = self.head[5:].strip()
+         return ref[len("refs/heads/") :] if ref.startswith("refs/heads/") else ref
+
+     @cached_property
+     def commit(self) -> str | None:
+         """Current commit SHA or None."""
+         if not self.is_repo or not self.head:
+             return None
+         return self._ref_commit(self.head[5:].strip()) if self.head.startswith("ref: ") else self.head
+
+     @cached_property
+     def origin(self) -> str | None:
+         """Origin URL or None."""
+         if not self.is_repo:
+             return None
+         cfg = self.gitdir / "config"
+         remote, url = None, None
+         for s in (self._read(cfg) or "").splitlines():
+             t = s.strip()
+             if t.startswith("[") and t.endswith("]"):
+                 remote = t.lower()
+             elif t.lower().startswith("url =") and remote == '[remote "origin"]':
+                 url = t.split("=", 1)[1].strip()
+                 break
+         return url
+
+
+ if __name__ == "__main__":
+     import time
+
+     g = GitRepo()
+     if g.is_repo:
+         t0 = time.perf_counter()
+         print(f"repo={g.root}\nbranch={g.branch}\ncommit={g.commit}\norigin={g.origin}")
+         dt = (time.perf_counter() - t0) * 1000
+         print(f"\n⏱️ Profiling: total {dt:.3f} ms")