ultralytics-opencv-headless 8.3.248__py3-none-any.whl → 8.3.253__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ultralytics/__init__.py +1 -1
- ultralytics/cfg/__init__.py +6 -4
- ultralytics/cfg/datasets/TT100K.yaml +346 -0
- ultralytics/engine/exporter.py +5 -3
- ultralytics/engine/model.py +4 -3
- ultralytics/engine/trainer.py +28 -25
- ultralytics/engine/tuner.py +1 -0
- ultralytics/nn/autobackend.py +9 -3
- ultralytics/solutions/object_counter.py +1 -1
- ultralytics/utils/benchmarks.py +1 -1
- ultralytics/utils/callbacks/platform.py +163 -59
- ultralytics/utils/checks.py +22 -3
- ultralytics/utils/export/imx.py +1 -1
- ultralytics/utils/metrics.py +1 -1
- ultralytics/utils/plotting.py +3 -0
- ultralytics/utils/tal.py +2 -1
- ultralytics/utils/torch_utils.py +1 -1
- ultralytics/utils/tqdm.py +4 -1
- ultralytics/utils/tuner.py +0 -3
- {ultralytics_opencv_headless-8.3.248.dist-info → ultralytics_opencv_headless-8.3.253.dist-info}/METADATA +1 -1
- {ultralytics_opencv_headless-8.3.248.dist-info → ultralytics_opencv_headless-8.3.253.dist-info}/RECORD +25 -24
- {ultralytics_opencv_headless-8.3.248.dist-info → ultralytics_opencv_headless-8.3.253.dist-info}/WHEEL +0 -0
- {ultralytics_opencv_headless-8.3.248.dist-info → ultralytics_opencv_headless-8.3.253.dist-info}/entry_points.txt +0 -0
- {ultralytics_opencv_headless-8.3.248.dist-info → ultralytics_opencv_headless-8.3.253.dist-info}/licenses/LICENSE +0 -0
- {ultralytics_opencv_headless-8.3.248.dist-info → ultralytics_opencv_headless-8.3.253.dist-info}/top_level.txt +0 -0
ultralytics/__init__.py
CHANGED
ultralytics/cfg/__init__.py
CHANGED
@@ -410,9 +410,11 @@ def get_save_dir(args: SimpleNamespace, name: str | None = None) -> Path:
     else:
         from ultralytics.utils.files import increment_path

-
+        runs = (ROOT.parent / "tests/tmp/runs" if TESTS_RUNNING else RUNS_DIR) / args.task
+        nested = args.project and len(Path(args.project).parts) > 1  # e.g. "user/project" or "org\repo"
+        project = runs / args.project if nested else args.project or runs
         name = name or args.name or f"{args.mode}"
-        save_dir = increment_path(Path(project) / name, exist_ok=args.exist_ok if RANK in {-1, 0} else True)
+        save_dir = increment_path(Path(project) / name, exist_ok=args.exist_ok if RANK in {-1, 0} else True, mkdir=True)

     return Path(save_dir).resolve()  # resolve to display full path in console

@@ -725,8 +727,8 @@ def handle_yolo_solutions(args: list[str]) -> None:
         )
         if solution_name == "analytics":  # analytical graphs follow fixed shape for output i.e w=1920, h=1080
             w, h = 1280, 720
-        save_dir = get_save_dir(SimpleNamespace(
-        save_dir.mkdir(parents=True)  # create the output directory i.e. runs/solutions/exp
+        save_dir = get_save_dir(SimpleNamespace(task="solutions", name="exp", exist_ok=False, project=None))
+        save_dir.mkdir(parents=True, exist_ok=True)  # create the output directory i.e. runs/solutions/exp
         vw = cv2.VideoWriter(str(save_dir / f"{solution_name}.avi"), cv2.VideoWriter_fourcc(*"mp4v"), fps, (w, h))

         try:  # Process video frames
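The nested-project handling above changes where results land when args.project contains a path separator. A minimal sketch of the resulting save paths (illustrative values, not code from the package):

from pathlib import Path

runs = Path("runs/detect")  # stand-in for RUNS_DIR / args.task
for project in (None, "exp", "user/my-project"):
    nested = project and len(Path(project).parts) > 1
    resolved = runs / project if nested else (project or runs)
    print(project, "->", Path(resolved) / "train")
# None -> runs/detect/train, "exp" -> exp/train, "user/my-project" -> runs/detect/user/my-project/train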
ultralytics/cfg/datasets/TT100K.yaml
ADDED
@@ -0,0 +1,346 @@
+# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+# Tsinghua-Tencent 100K (TT100K) dataset https://cg.cs.tsinghua.edu.cn/traffic-sign/ by Tsinghua University
+# Documentation: https://cg.cs.tsinghua.edu.cn/traffic-sign/tutorial.html
+# Paper: Traffic-Sign Detection and Classification in the Wild (CVPR 2016)
+# License: CC BY-NC 2.0 license for non-commercial use only
+# Example usage: yolo train data=TT100K.yaml
+# parent
+# ├── ultralytics
+# └── datasets
+#     └── TT100K ← downloads here (~18 GB)
+
+# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..]
+path: TT100K # dataset root dir
+train: images/train # train images (relative to 'path') 6105 images
+val: images/val # val images (relative to 'path') 7641 images (original 'other' split)
+test: images/test # test images (relative to 'path') 3071 images
+
+# Classes (221 traffic sign categories, 45 with sufficient training instances)
+names:
+  0: pl5
+  1: pl10
+  2: pl15
+  3: pl20
+  4: pl25
+  5: pl30
+  6: pl40
+  7: pl50
+  8: pl60
+  9: pl70
+  10: pl80
+  11: pl90
+  12: pl100
+  13: pl110
+  14: pl120
+  15: pm5
+  16: pm10
+  17: pm13
+  18: pm15
+  19: pm20
+  20: pm25
+  21: pm30
+  22: pm35
+  23: pm40
+  24: pm46
+  25: pm50
+  26: pm55
+  27: pm8
+  28: pn
+  29: pne
+  30: ph4
+  31: ph4.5
+  32: ph5
+  33: ps
+  34: pg
+  35: ph1.5
+  36: ph2
+  37: ph2.1
+  38: ph2.2
+  39: ph2.4
+  40: ph2.5
+  41: ph2.8
+  42: ph2.9
+  43: ph3
+  44: ph3.2
+  45: ph3.5
+  46: ph3.8
+  47: ph4.2
+  48: ph4.3
+  49: ph4.8
+  50: ph5.3
+  51: ph5.5
+  52: pb
+  53: pr10
+  54: pr100
+  55: pr20
+  56: pr30
+  57: pr40
+  58: pr45
+  59: pr50
+  60: pr60
+  61: pr70
+  62: pr80
+  63: pr90
+  64: p1
+  65: p2
+  66: p3
+  67: p4
+  68: p5
+  69: p6
+  70: p7
+  71: p8
+  72: p9
+  73: p10
+  74: p11
+  75: p12
+  76: p13
+  77: p14
+  78: p15
+  79: p16
+  80: p17
+  81: p18
+  82: p19
+  83: p20
+  84: p21
+  85: p22
+  86: p23
+  87: p24
+  88: p25
+  89: p26
+  90: p27
+  91: p28
+  92: pa8
+  93: pa10
+  94: pa12
+  95: pa13
+  96: pa14
+  97: pb5
+  98: pc
+  99: pg
+  100: ph1
+  101: ph1.3
+  102: ph1.5
+  103: ph2
+  104: ph3
+  105: ph4
+  106: ph5
+  107: pi
+  108: pl0
+  109: pl4
+  110: pl5
+  111: pl8
+  112: pl10
+  113: pl15
+  114: pl20
+  115: pl25
+  116: pl30
+  117: pl35
+  118: pl40
+  119: pl50
+  120: pl60
+  121: pl65
+  122: pl70
+  123: pl80
+  124: pl90
+  125: pl100
+  126: pl110
+  127: pl120
+  128: pm2
+  129: pm8
+  130: pm10
+  131: pm13
+  132: pm15
+  133: pm20
+  134: pm25
+  135: pm30
+  136: pm35
+  137: pm40
+  138: pm46
+  139: pm50
+  140: pm55
+  141: pn
+  142: pne
+  143: po
+  144: pr10
+  145: pr100
+  146: pr20
+  147: pr30
+  148: pr40
+  149: pr45
+  150: pr50
+  151: pr60
+  152: pr70
+  153: pr80
+  154: ps
+  155: w1
+  156: w2
+  157: w3
+  158: w5
+  159: w8
+  160: w10
+  161: w12
+  162: w13
+  163: w16
+  164: w18
+  165: w20
+  166: w21
+  167: w22
+  168: w24
+  169: w28
+  170: w30
+  171: w31
+  172: w32
+  173: w34
+  174: w35
+  175: w37
+  176: w38
+  177: w41
+  178: w42
+  179: w43
+  180: w44
+  181: w45
+  182: w46
+  183: w47
+  184: w48
+  185: w49
+  186: w50
+  187: w51
+  188: w52
+  189: w53
+  190: w54
+  191: w55
+  192: w56
+  193: w57
+  194: w58
+  195: w59
+  196: w60
+  197: w62
+  198: w63
+  199: w66
+  200: i1
+  201: i2
+  202: i3
+  203: i4
+  204: i5
+  205: i6
+  206: i7
+  207: i8
+  208: i9
+  209: i10
+  210: i11
+  211: i12
+  212: i13
+  213: i14
+  214: i15
+  215: il60
+  216: il80
+  217: il100
+  218: il110
+  219: io
+  220: ip
+
+# Download script/URL (optional) ---------------------------------------------------------------------------------------
+download: |
+  import json
+  import shutil
+  from pathlib import Path
+
+  from PIL import Image
+
+  from ultralytics.utils import TQDM
+  from ultralytics.utils.downloads import download
+
+
+  def tt100k2yolo(dir):
+      """Convert TT100K annotations to YOLO format with images/{split} and labels/{split} structure."""
+      data_dir = dir / "data"
+      anno_file = data_dir / "annotations.json"
+
+      print("Loading annotations...")
+      with open(anno_file, encoding="utf-8") as f:
+          data = json.load(f)
+
+      # Build class name to index mapping from yaml
+      names = yaml["names"]
+      class_to_idx = {v: k for k, v in names.items()}
+
+      # Create directories
+      for split in ["train", "val", "test"]:
+          (dir / "images" / split).mkdir(parents=True, exist_ok=True)
+          (dir / "labels" / split).mkdir(parents=True, exist_ok=True)
+
+      print("Converting annotations to YOLO format...")
+      skipped = 0
+      for img_id, img_data in TQDM(data["imgs"].items(), desc="Processing"):
+          img_path_str = img_data["path"]
+          if "train" in img_path_str:
+              split = "train"
+          elif "test" in img_path_str:
+              split = "test"
+          else:
+              split = "val"
+
+          # Source and destination paths
+          src_img = data_dir / img_path_str
+          if not src_img.exists():
+              continue
+
+          dst_img = dir / "images" / split / src_img.name
+
+          # Get image dimensions
+          try:
+              with Image.open(src_img) as img:
+                  img_width, img_height = img.size
+          except Exception as e:
+              print(f"Error reading {src_img}: {e}")
+              continue
+
+          # Copy image to destination
+          shutil.copy2(src_img, dst_img)
+
+          # Convert annotations
+          label_file = dir / "labels" / split / f"{src_img.stem}.txt"
+          lines = []
+
+          for obj in img_data.get("objects", []):
+              category = obj["category"]
+              if category not in class_to_idx:
+                  skipped += 1
+                  continue
+
+              bbox = obj["bbox"]
+              xmin, ymin = bbox["xmin"], bbox["ymin"]
+              xmax, ymax = bbox["xmax"], bbox["ymax"]
+
+              # Convert to YOLO format (normalized center coordinates and dimensions)
+              x_center = ((xmin + xmax) / 2.0) / img_width
+              y_center = ((ymin + ymax) / 2.0) / img_height
+              width = (xmax - xmin) / img_width
+              height = (ymax - ymin) / img_height
+
+              # Clip to valid range
+              x_center = max(0, min(1, x_center))
+              y_center = max(0, min(1, y_center))
+              width = max(0, min(1, width))
+              height = max(0, min(1, height))
+
+              cls_idx = class_to_idx[category]
+              lines.append(f"{cls_idx} {x_center:.6f} {y_center:.6f} {width:.6f} {height:.6f}\n")
+
+          # Write label file
+          if lines:
+              label_file.write_text("".join(lines), encoding="utf-8")
+
+      if skipped:
+          print(f"Skipped {skipped} annotations with unknown categories")
+      print("Conversion complete!")
+
+
+  # Download
+  dir = Path(yaml["path"])  # dataset root dir
+  urls = ["https://cg.cs.tsinghua.edu.cn/traffic-sign/data_model_code/data.zip"]
+  download(urls, dir=dir, curl=True, threads=1)
+
+  # Convert
+  tt100k2yolo(dir)
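The download script converts TT100K pixel boxes into normalized YOLO labels. The core transform, pulled out as a standalone sketch (the 2048x2048 image size is just an example value):

def xyxy_to_yolo(xmin, ymin, xmax, ymax, img_w, img_h):
    """Normalize a pixel bbox to YOLO (cx, cy, w, h) in [0, 1], matching the script above."""
    clip = lambda v: max(0.0, min(1.0, v))
    cx = clip(((xmin + xmax) / 2.0) / img_w)
    cy = clip(((ymin + ymax) / 2.0) / img_h)
    w = clip((xmax - xmin) / img_w)
    h = clip((ymax - ymin) / img_h)
    return cx, cy, w, h

print(xyxy_to_yolo(100, 200, 300, 400, 2048, 2048))  # (0.09765625, 0.146484375, 0.09765625, 0.09765625)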
ultralytics/engine/exporter.py
CHANGED
@@ -505,6 +505,7 @@ class Exporter:
                 m.format = self.args.format
                 m.max_det = self.args.max_det
                 m.xyxy = self.args.nms and not coreml
+                m.shape = None  # reset cached shape for new export input size
                 if hasattr(model, "pe") and hasattr(m, "fuse"):  # for YOLOE models
                     m.fuse(model.pe.to(self.device))
             elif isinstance(m, C2f) and not is_tf_format:
@@ -812,11 +813,11 @@ class Exporter:
         assert not IS_JETSON, "Jetson Paddle exports not supported yet"
         check_requirements(
             (
-                "paddlepaddle-gpu"
+                "paddlepaddle-gpu>=3.0.0,!=3.3.0"  # exclude 3.3.0 https://github.com/PaddlePaddle/Paddle/issues/77340
                 if torch.cuda.is_available()
                 else "paddlepaddle==3.0.0"  # pin 3.0.0 for ARM64
                 if ARM64
-                else "paddlepaddle>=3.0.0",
+                else "paddlepaddle>=3.0.0,!=3.3.0",  # exclude 3.3.0 https://github.com/PaddlePaddle/Paddle/issues/77340
                 "x2paddle",
             )
         )
@@ -859,7 +860,8 @@ class Exporter:
     @try_export
     def export_ncnn(self, prefix=colorstr("NCNN:")):
         """Export YOLO model to NCNN format using PNNX https://github.com/pnnx/pnnx."""
-
+        # use git source for ARM64 due to broken PyPI packages https://github.com/Tencent/ncnn/issues/6509
+        check_requirements("git+https://github.com/Tencent/ncnn.git" if ARM64 else "ncnn", cmds="--no-deps")
         check_requirements("pnnx")
         import ncnn
         import pnnx
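The Paddle requirement now excludes 3.3.0 while keeping the >=3.0.0 floor. A quick check of how that specifier string behaves (uses the third-party packaging library; not part of the diff):

from packaging.specifiers import SpecifierSet

spec = SpecifierSet(">=3.0.0,!=3.3.0")  # same constraint string passed to check_requirements above
for v in ("2.6.1", "3.0.0", "3.3.0", "3.3.1"):
    print(v, v in spec)  # 2.6.1 False, 3.0.0 True, 3.3.0 False, 3.3.1 True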
ultralytics/engine/model.py
CHANGED
@@ -275,7 +275,7 @@ class Model(torch.nn.Module):
            >>> model._load("yolo11n.pt")
            >>> model._load("path/to/weights.pth", task="detect")
        """
-        if weights.lower().startswith(("https://", "http://", "rtsp://", "rtmp://", "tcp://")):
+        if weights.lower().startswith(("https://", "http://", "rtsp://", "rtmp://", "tcp://", "ul://")):
            weights = checks.check_file(weights, download_dir=SETTINGS["weights_dir"])  # download and return local file
        weights = checks.check_model_file_from_stem(weights)  # add suffix, i.e. yolo11n -> yolo11n.pt

@@ -403,7 +403,7 @@ class Model(torch.nn.Module):
        }
        torch.save({**self.ckpt, **updates}, filename)

-    def info(self, detailed: bool = False, verbose: bool = True):
+    def info(self, detailed: bool = False, verbose: bool = True, imgsz: int | list[int, int] = 640):
        """Display model information.

        This method provides an overview or detailed information about the model, depending on the arguments
@@ -412,6 +412,7 @@ class Model(torch.nn.Module):
        Args:
            detailed (bool): If True, shows detailed information about the model layers and parameters.
            verbose (bool): If True, prints the information. If False, returns the information as a list.
+            imgsz (int | list[int, int]): Input image size used for FLOPs calculation.

        Returns:
            (list[str]): A list of strings containing various types of information about the model, including model
@@ -423,7 +424,7 @@ class Model(torch.nn.Module):
            >>> info_list = model.info(detailed=True, verbose=False)  # Returns detailed info as a list
        """
        self._check_is_pytorch_model()
-        return self.model.info(detailed=detailed, verbose=verbose)
+        return self.model.info(detailed=detailed, verbose=verbose, imgsz=imgsz)

    def fuse(self) -> None:
        """Fuse Conv2d and BatchNorm2d layers in the model for optimized inference.
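With the new imgsz parameter, FLOPs can be reported for the input size actually used. A hedged usage sketch (assumes yolo11n.pt is available locally or downloadable):

from ultralytics import YOLO

model = YOLO("yolo11n.pt")
model.info(detailed=False, verbose=True)             # GFLOPs at the default 640 input
model.info(detailed=False, verbose=True, imgsz=320)  # GFLOPs recomputed for a 320-pixel input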
ultralytics/engine/trainer.py
CHANGED
@@ -157,6 +157,27 @@ class BaseTrainer:
         if self.device.type in {"cpu", "mps"}:
             self.args.workers = 0  # faster CPU training as time dominated by inference, not dataloading

+        # Callbacks - initialize early so on_pretrain_routine_start can capture original args.data
+        self.callbacks = _callbacks or callbacks.get_default_callbacks()
+
+        if isinstance(self.args.device, str) and len(self.args.device):  # i.e. device='0' or device='0,1,2,3'
+            world_size = len(self.args.device.split(","))
+        elif isinstance(self.args.device, (tuple, list)):  # i.e. device=[0, 1, 2, 3] (multi-GPU from CLI is list)
+            world_size = len(self.args.device)
+        elif self.args.device in {"cpu", "mps"}:  # i.e. device='cpu' or 'mps'
+            world_size = 0
+        elif torch.cuda.is_available():  # i.e. device=None or device='' or device=number
+            world_size = 1  # default to device 0
+        else:  # i.e. device=None or device=''
+            world_size = 0
+
+        self.ddp = world_size > 1 and "LOCAL_RANK" not in os.environ
+        self.world_size = world_size
+        # Run on_pretrain_routine_start before get_dataset() to capture original args.data (e.g., ul:// URIs)
+        if RANK in {-1, 0} and not self.ddp:
+            callbacks.add_integration_callbacks(self)
+            self.run_callbacks("on_pretrain_routine_start")
+
         # Model and Dataset
         self.model = check_model_file_from_stem(self.args.model)  # add suffix, i.e. yolo11n -> yolo11n.pt
         with torch_distributed_zero_first(LOCAL_RANK):  # avoid auto-downloading dataset multiple times
@@ -180,28 +201,6 @@ class BaseTrainer:
         self.plot_idx = [0, 1, 2]
         self.nan_recovery_attempts = 0

-        # Callbacks
-        self.callbacks = _callbacks or callbacks.get_default_callbacks()
-
-        if isinstance(self.args.device, str) and len(self.args.device):  # i.e. device='0' or device='0,1,2,3'
-            world_size = len(self.args.device.split(","))
-        elif isinstance(self.args.device, (tuple, list)):  # i.e. device=[0, 1, 2, 3] (multi-GPU from CLI is list)
-            world_size = len(self.args.device)
-        elif self.args.device in {"cpu", "mps"}:  # i.e. device='cpu' or 'mps'
-            world_size = 0
-        elif torch.cuda.is_available():  # i.e. device=None or device='' or device=number
-            world_size = 1  # default to device 0
-        else:  # i.e. device=None or device=''
-            world_size = 0
-
-        self.ddp = world_size > 1 and "LOCAL_RANK" not in os.environ
-        self.world_size = world_size
-        # Run subprocess if DDP training, else train normally
-        if RANK in {-1, 0} and not self.ddp:
-            callbacks.add_integration_callbacks(self)
-            # Start console logging immediately at trainer initialization
-            self.run_callbacks("on_pretrain_routine_start")
-
     def add_callback(self, event: str, callback):
        """Append the given callback to the event's callback list."""
        self.callbacks[event].append(callback)
@@ -631,13 +630,17 @@ class BaseTrainer:
        try:
            if self.args.task == "classify":
                data = check_cls_dataset(self.args.data)
-            elif str(self.args.data).rsplit(".", 1)[-1] == "ndjson"
-
+            elif str(self.args.data).rsplit(".", 1)[-1] == "ndjson" or (
+                str(self.args.data).startswith("ul://") and "/datasets/" in str(self.args.data)
+            ):
+                # Convert NDJSON to YOLO format (including ul:// platform dataset URIs)
                import asyncio

                from ultralytics.data.converter import convert_ndjson_to_yolo
+                from ultralytics.utils.checks import check_file

-
+                ndjson_file = check_file(self.args.data)  # Resolve ul:// or URL to local .ndjson file
+                yaml_path = asyncio.run(convert_ndjson_to_yolo(ndjson_file))
                self.args.data = str(yaml_path)
                data = check_det_dataset(self.args.data)
            elif str(self.args.data).rsplit(".", 1)[-1] in {"yaml", "yml"} or self.args.task in {
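The device-parsing block that moved earlier in __init__ decides the DDP world size before any callback runs. A standalone sketch of that decision (mirrors the branches above, not the trainer itself):

def infer_world_size(device, cuda_available=True):
    """Return the world size the trainer would derive for a given device argument."""
    if isinstance(device, str) and len(device):   # e.g. device='0' or device='0,1,2,3'
        return len(device.split(","))
    elif isinstance(device, (tuple, list)):       # e.g. device=[0, 1, 2, 3]
        return len(device)
    elif device in {"cpu", "mps"}:
        return 0
    elif cuda_available:                          # device=None or device=''
        return 1
    return 0

print(infer_world_size("0,1"), infer_world_size([0, 1, 2]), infer_world_size(None, cuda_available=False))  # 2 3 0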
ultralytics/engine/tuner.py
CHANGED
@@ -378,6 +378,7 @@ class Tuner:
            metrics = {}
            train_args = {**vars(self.args), **mutated_hyp}
            save_dir = get_save_dir(get_cfg(train_args))
+            train_args["save_dir"] = str(save_dir)  # pass save_dir to subprocess to ensure same path is used
            weights_dir = save_dir / "weights"
            try:
                # Train YOLO model with mutated hyperparameters (run in subprocess to avoid dataloader hang)
ultralytics/nn/autobackend.py
CHANGED
@@ -497,11 +497,11 @@ class AutoBackend(nn.Module):
        elif paddle:
            LOGGER.info(f"Loading {w} for PaddlePaddle inference...")
            check_requirements(
-                "paddlepaddle-gpu"
+                "paddlepaddle-gpu>=3.0.0,!=3.3.0"  # exclude 3.3.0 https://github.com/PaddlePaddle/Paddle/issues/77340
                if torch.cuda.is_available()
                else "paddlepaddle==3.0.0"  # pin 3.0.0 for ARM64
                if ARM64
-                else "paddlepaddle>=3.0.0"
+                else "paddlepaddle>=3.0.0,!=3.3.0"  # exclude 3.3.0 https://github.com/PaddlePaddle/Paddle/issues/77340
            )
            import paddle.inference as pdi

@@ -545,11 +545,17 @@ class AutoBackend(nn.Module):
        # NCNN
        elif ncnn:
            LOGGER.info(f"Loading {w} for NCNN inference...")
+            # use git source for ARM64 due to broken PyPI packages https://github.com/Tencent/ncnn/issues/6509
            check_requirements("git+https://github.com/Tencent/ncnn.git" if ARM64 else "ncnn", cmds="--no-deps")
            import ncnn as pyncnn

            net = pyncnn.Net()
-
+            if isinstance(cuda, torch.device):
+                net.opt.use_vulkan_compute = cuda
+            elif isinstance(device, str) and device.startswith("vulkan"):
+                net.opt.use_vulkan_compute = True
+                net.set_vulkan_device(int(device.split(":")[1]))
+                device = torch.device("cpu")
            w = Path(w)
            if not w.is_file():  # if not *.param
                w = next(w.glob("*.param"))  # get *.param file from *_ncnn_model dir

ultralytics/solutions/object_counter.py
CHANGED
@@ -129,7 +129,7 @@ class ObjectCounter(BaseSolution):
            str.capitalize(key): f"{'IN ' + str(value['IN']) if self.show_in else ''} "
            f"{'OUT ' + str(value['OUT']) if self.show_out else ''}".strip()
            for key, value in self.classwise_count.items()
-            if value["IN"] != 0 or (value["OUT"] != 0 and
+            if (value["IN"] != 0 and self.show_in) or (value["OUT"] != 0 and self.show_out)
        }
        if labels_dict:
            self.annotator.display_analytics(plot_im, labels_dict, (104, 31, 17), (255, 255, 255), self.margin)
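The NCNN branch above adds Vulkan selection via a device string. A hedged usage sketch (assumes an NCNN export already exists on disk and that the device argument is passed through to the backend unchanged; "vulkan:0" is an illustrative value):

from ultralytics import YOLO

ncnn_model = YOLO("yolo11n_ncnn_model")                     # directory produced by model.export(format="ncnn")
results = ncnn_model.predict("bus.jpg", device="vulkan:0")  # enables net.opt.use_vulkan_compute on Vulkan device 0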
ultralytics/utils/benchmarks.py
CHANGED
@@ -460,7 +460,7 @@ class ProfileModels:
            if file.suffix in {".pt", ".yaml", ".yml"}:
                model = YOLO(str(file))
                model.fuse()  # to report correct params and GFLOPs in model.info()
-                model_info = model.info()
+                model_info = model.info(imgsz=self.imgsz)
                if self.trt and self.device.type != "cpu" and not engine_file.is_file():
                    engine_file = model.export(
                        format="engine",

ultralytics/utils/callbacks/platform.py
CHANGED
@@ -2,6 +2,7 @@

 import os
 import platform
+import re
 import socket
 import sys
 from concurrent.futures import ThreadPoolExecutor
@@ -11,9 +12,14 @@ from time import time
 from ultralytics.utils import ENVIRONMENT, GIT, LOGGER, PYTHON_VERSION, RANK, SETTINGS, TESTS_RUNNING, colorstr

 PREFIX = colorstr("Platform: ")
-
-
-
+
+
+def slugify(text):
+    """Convert text to URL-safe slug (e.g., 'My Project 1' -> 'my-project-1')."""
+    if not text:
+        return text
+    return re.sub(r"-+", "-", re.sub(r"[^a-z0-9\s-]", "", str(text).lower()).replace(" ", "-")).strip("-")[:128]
+

 try:
     assert not TESTS_RUNNING  # do not log pytest
@@ -32,6 +38,83 @@ except (AssertionError, ImportError):
     _api_key = None


+def resolve_platform_uri(uri, hard=True):
+    """Resolve ul:// URIs to signed URLs by authenticating with Ultralytics Platform.
+
+    Formats:
+        ul://username/datasets/slug -> Returns signed URL to NDJSON file
+        ul://username/project/model -> Returns signed URL to .pt file
+
+    Args:
+        uri (str): Platform URI starting with "ul://".
+        hard (bool): Whether to raise an error if resolution fails (FileNotFoundError only).
+
+    Returns:
+        (str | None): Signed URL on success, None if not found and hard=False.
+
+    Raises:
+        ValueError: If API key is missing/invalid or URI format is wrong.
+        PermissionError: If access is denied.
+        RuntimeError: If resource is not ready (e.g., dataset still processing).
+        FileNotFoundError: If resource not found and hard=True.
+        ConnectionError: If network request fails and hard=True.
+    """
+    import requests
+
+    path = uri[5:]  # Remove "ul://"
+    parts = path.split("/")
+
+    api_key = os.getenv("ULTRALYTICS_API_KEY") or SETTINGS.get("api_key")
+    if not api_key:
+        raise ValueError(f"ULTRALYTICS_API_KEY required for '{uri}'. Get key at https://alpha.ultralytics.com/settings")
+
+    base = "https://alpha.ultralytics.com/api/webhooks"
+    headers = {"Authorization": f"Bearer {api_key}"}
+
+    # ul://username/datasets/slug
+    if len(parts) == 3 and parts[1] == "datasets":
+        username, _, slug = parts
+        url = f"{base}/datasets/{username}/{slug}/export"
+
+    # ul://username/project/model
+    elif len(parts) == 3:
+        username, project, model = parts
+        url = f"{base}/models/{username}/{project}/{model}/download"
+
+    else:
+        raise ValueError(f"Invalid platform URI: {uri}. Use ul://user/datasets/name or ul://user/project/model")
+
+    try:
+        r = requests.head(url, headers=headers, allow_redirects=False, timeout=30)
+
+        # Handle redirect responses (301, 302, 303, 307, 308)
+        if 300 <= r.status_code < 400 and "location" in r.headers:
+            return r.headers["location"]  # Return signed URL
+
+        # Handle error responses
+        if r.status_code == 401:
+            raise ValueError(f"Invalid ULTRALYTICS_API_KEY for '{uri}'")
+        if r.status_code == 403:
+            raise PermissionError(f"Access denied for '{uri}'. Check dataset/model visibility settings.")
+        if r.status_code == 404:
+            if hard:
+                raise FileNotFoundError(f"Not found on platform: {uri}")
+            LOGGER.warning(f"Not found on platform: {uri}")
+            return None
+        if r.status_code == 409:
+            raise RuntimeError(f"Resource not ready: {uri}. Dataset may still be processing.")
+
+        # Unexpected response
+        r.raise_for_status()
+        raise RuntimeError(f"Unexpected response from platform for '{uri}': {r.status_code}")
+
+    except requests.exceptions.RequestException as e:
+        if hard:
+            raise ConnectionError(f"Failed to resolve {uri}: {e}") from e
+        LOGGER.warning(f"Failed to resolve {uri}: {e}")
+        return None
+
+
 def _interp_plot(plot, n=101):
     """Interpolate plot curve data from 1000 to n points to reduce storage size."""
     import numpy as np
@@ -60,22 +143,28 @@ def _interp_plot(plot, n=101):
     return result


-def _send(event, data, project, name):
-    """Send event to Platform endpoint."""
+def _send(event, data, project, name, model_id=None):
+    """Send event to Platform endpoint. Returns response JSON on success."""
     try:
-
+        payload = {"event": event, "project": project, "name": name, "data": data}
+        if model_id:
+            payload["modelId"] = model_id
+        r = requests.post(
            "https://alpha.ultralytics.com/api/webhooks/training/metrics",
-            json=
+            json=payload,
            headers={"Authorization": f"Bearer {_api_key}"},
            timeout=10,
-        )
+        )
+        r.raise_for_status()
+        return r.json()
     except Exception as e:
        LOGGER.debug(f"Platform: Failed to send {event}: {e}")
+        return None


-def _send_async(event, data, project, name):
+def _send_async(event, data, project, name, model_id=None):
    """Send event asynchronously using bounded thread pool."""
-    _executor.submit(_send, event, data, project, name)
+    _executor.submit(_send, event, data, project, name, model_id)


 def _upload_model(model_path, project, name):
@@ -169,132 +258,146 @@ def _get_environment_info():
     return env


+def _get_project_name(trainer):
+    """Get slugified project and name from trainer args."""
+    raw = str(trainer.args.project)
+    parts = raw.split("/", 1)
+    project = f"{parts[0]}/{slugify(parts[1])}" if len(parts) == 2 else slugify(raw)
+    return project, slugify(str(trainer.args.name or "train"))
+
+
 def on_pretrain_routine_start(trainer):
    """Initialize Platform logging at training start."""
-    global _console_logger, _last_upload
-
    if RANK not in {-1, 0} or not trainer.args.project:
        return

-    #
-
+    # Per-trainer state to isolate concurrent training runs
+    trainer._platform_model_id = None
+    trainer._platform_last_upload = time()

-    project, name =
+    project, name = _get_project_name(trainer)
    url = f"https://alpha.ultralytics.com/{project}/{name}"
    LOGGER.info(f"{PREFIX}Streaming to {url}")

    # Create callback to send console output to Platform
    def send_console_output(content, line_count, chunk_id):
        """Send batched console output to Platform webhook."""
-        _send_async(
+        _send_async(
+            "console_output",
+            {"chunkId": chunk_id, "content": content, "lineCount": line_count},
+            project,
+            name,
+            getattr(trainer, "_platform_model_id", None),
+        )

    # Start console capture with batching (5 lines or 5 seconds)
-
-
-
-    # Gather model info for richer metadata
-    model_info = {}
-    try:
-        info = model_info_for_loggers(trainer)
-        model_info = {
-            "parameters": info.get("model/parameters", 0),
-            "gflops": info.get("model/GFLOPs", 0),
-            "classes": getattr(trainer.model, "yaml", {}).get("nc", 0),  # number of classes
-        }
-    except Exception:
-        pass
+    trainer._platform_console_logger = ConsoleLogger(batch_size=5, flush_interval=5.0, on_flush=send_console_output)
+    trainer._platform_console_logger.start_capture()

    # Collect environment info (W&B-style metadata)
    environment = _get_environment_info()

-
+    # Build trainArgs - callback runs before get_dataset() so args.data is still original (e.g., ul:// URIs)
+    # Note: model_info is sent later in on_fit_epoch_end (epoch 0) when the model is actually loaded
+    train_args = {k: str(v) for k, v in vars(trainer.args).items()}
+
+    # Send synchronously to get modelId for subsequent webhooks
+    response = _send(
        "training_started",
        {
-            "trainArgs":
+            "trainArgs": train_args,
            "epochs": trainer.epochs,
            "device": str(trainer.device),
-            "modelInfo": model_info,
            "environment": environment,
        },
        project,
        name,
    )
+    if response and response.get("modelId"):
+        trainer._platform_model_id = response["modelId"]


 def on_fit_epoch_end(trainer):
    """Log training and system metrics at epoch end."""
-    global _system_logger
-
    if RANK not in {-1, 0} or not trainer.args.project:
        return

-    project, name =
+    project, name = _get_project_name(trainer)
    metrics = {**trainer.label_loss_items(trainer.tloss, prefix="train"), **trainer.metrics}

    if trainer.optimizer and trainer.optimizer.param_groups:
        metrics["lr"] = trainer.optimizer.param_groups[0]["lr"]
+
+    # Extract model info at epoch 0 (sent as separate field, not in metrics)
+    model_info = None
    if trainer.epoch == 0:
        try:
-
+            info = model_info_for_loggers(trainer)
+            model_info = {
+                "parameters": info.get("model/parameters", 0),
+                "gflops": info.get("model/GFLOPs", 0),
+                "speedMs": info.get("model/speed_PyTorch(ms)", 0),
+            }
        except Exception:
            pass

-    # Get system metrics (cache SystemLogger for efficiency)
+    # Get system metrics (cache SystemLogger on trainer for efficiency)
    system = {}
    try:
-        if
-
-        system =
+        if not hasattr(trainer, "_platform_system_logger"):
+            trainer._platform_system_logger = SystemLogger()
+        system = trainer._platform_system_logger.get_metrics(rates=True)
    except Exception:
        pass

+    payload = {
+        "epoch": trainer.epoch,
+        "metrics": metrics,
+        "system": system,
+        "fitness": trainer.fitness,
+        "best_fitness": trainer.best_fitness,
+    }
+    if model_info:
+        payload["modelInfo"] = model_info
+
    _send_async(
        "epoch_end",
-
-            "epoch": trainer.epoch,
-            "metrics": metrics,
-            "system": system,
-            "fitness": trainer.fitness,
-            "best_fitness": trainer.best_fitness,
-        },
+        payload,
        project,
        name,
+        getattr(trainer, "_platform_model_id", None),
    )


 def on_model_save(trainer):
    """Upload model checkpoint (rate limited to every 15 min)."""
-    global _last_upload
-
    if RANK not in {-1, 0} or not trainer.args.project:
        return

    # Rate limit to every 15 minutes (900 seconds)
-    if time() -
+    if time() - getattr(trainer, "_platform_last_upload", 0) < 900:
        return

    model_path = trainer.best if trainer.best and Path(trainer.best).exists() else trainer.last
    if not model_path:
        return

-    project, name =
+    project, name = _get_project_name(trainer)
    _upload_model_async(model_path, project, name)
-
+    trainer._platform_last_upload = time()


 def on_train_end(trainer):
    """Log final results, upload best model, and send validation plot data."""
-    global _console_logger
-
    if RANK not in {-1, 0} or not trainer.args.project:
        return

-    project, name =
+    project, name = _get_project_name(trainer)

    # Stop console capture
-    if
-
-
+    if hasattr(trainer, "_platform_console_logger") and trainer._platform_console_logger:
+        trainer._platform_console_logger.stop_capture()
+        trainer._platform_console_logger = None

    # Upload best model (blocking to ensure it completes)
    model_path = None
@@ -332,6 +435,7 @@ def on_train_end(trainer):
        },
        project,
        name,
+        getattr(trainer, "_platform_model_id", None),
    )
    url = f"https://alpha.ultralytics.com/{project}/{name}"
    LOGGER.info(f"{PREFIX}View results at {url}")
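slugify() is used to build the platform project/run URL segments. A self-contained copy of the same transform to show what it produces (the sample strings are illustrative):

import re


def slugify(text):
    """Convert text to a URL-safe slug, identical to the helper added above."""
    if not text:
        return text
    return re.sub(r"-+", "-", re.sub(r"[^a-z0-9\s-]", "", str(text).lower()).replace(" ", "-")).strip("-")[:128]


print(slugify("My Project 1"))      # my-project-1
print(slugify("Träffic Signs!!"))   # trffic-signs (non-ASCII and punctuation are dropped)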
ultralytics/utils/checks.py
CHANGED
@@ -592,7 +592,7 @@ def check_file(file, suffix="", download=True, download_dir=".", hard=True):
     """Search/download file (if necessary), check suffix (if provided), and return path.

     Args:
-        file (str): File name or path.
+        file (str): File name or path, URL, platform URI (ul://), or GCS path (gs://).
         suffix (str | tuple): Acceptable suffix or tuple of suffixes to validate against the file.
         download (bool): Whether to download the file if it doesn't exist locally.
         download_dir (str): Directory to download the file to.
@@ -610,7 +610,26 @@ def check_file(file, suffix="", download=True, download_dir=".", hard=True):
         or file.lower().startswith("grpc://")
     ):  # file exists or gRPC Triton images
         return file
-    elif download and file.lower().startswith(
+    elif download and file.lower().startswith("ul://"):  # Ultralytics Platform URI
+        from ultralytics.utils.callbacks.platform import resolve_platform_uri
+
+        url = resolve_platform_uri(file, hard=hard)  # Convert to signed HTTPS URL
+        if url is None:
+            return []  # Not found, soft fail (consistent with file search behavior)
+        # Use URI path for unique directory structure: ul://user/project/model -> user/project/model/filename.pt
+        uri_path = file[5:]  # Remove "ul://"
+        local_file = Path(download_dir) / uri_path / url2file(url)
+        if local_file.exists():
+            LOGGER.info(f"Found {clean_url(url)} locally at {local_file}")
+        else:
+            local_file.parent.mkdir(parents=True, exist_ok=True)
+            downloads.safe_download(url=url, file=local_file, unzip=False)
+        return str(local_file)
+    elif download and file.lower().startswith(
+        ("https://", "http://", "rtsp://", "rtmp://", "tcp://", "gs://")
+    ):  # download
+        if file.startswith("gs://"):
+            file = "https://storage.googleapis.com/" + file[5:]  # convert gs:// to public HTTPS URL
         url = file  # warning: Pathlib turns :// -> :/
         file = Path(download_dir) / url2file(file)  # '%2F' to '/', split https://url.com/file.txt?auth
         if file.exists():
@@ -945,7 +964,7 @@ def is_rockchip():
         with open("/proc/device-tree/compatible") as f:
             dev_str = f.read()
             *_, soc = dev_str.split(",")
-            if soc.replace("\x00", "") in RKNN_CHIPS:
+            if soc.replace("\x00", "").split("-", 1)[0] in RKNN_CHIPS:
                 return True
     except OSError:
         return False
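Together with the resolve_platform_uri() callback, this lets weights and datasets be referenced by ul:// URIs. A hedged usage sketch (requires a valid ULTRALYTICS_API_KEY; the user, project, model, and dataset names here are hypothetical):

import os

from ultralytics import YOLO

os.environ["ULTRALYTICS_API_KEY"] = "<your-key>"          # or set api_key in the Ultralytics SETTINGS
model = YOLO("ul://username/my-project/my-model")         # resolved to a signed URL, downloaded to weights_dir
model.train(data="ul://username/datasets/my-dataset")     # dataset URI fetched as NDJSON and converted to YOLO format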
ultralytics/utils/export/imx.py
CHANGED
@@ -219,7 +219,7 @@ def torch2imx(
     Examples:
         >>> from ultralytics import YOLO
         >>> model = YOLO("yolo11n.pt")
-        >>> path, _ = export_imx(model, "model.imx", conf=0.25, iou=0.
+        >>> path, _ = export_imx(model, "model.imx", conf=0.25, iou=0.7, max_det=300)

     Notes:
         - Requires model_compression_toolkit, onnx, edgemdt_tpc, and edge-mdt-cl packages
ultralytics/utils/metrics.py
CHANGED
@@ -315,7 +315,7 @@ class ConfusionMatrix(DataExportMixin):
         matches (dict): Contains the indices of ground truths and predictions categorized into TP, FP and FN.
     """

-    def __init__(self, names: dict[int, str] =
+    def __init__(self, names: dict[int, str] = {}, task: str = "detect", save_matches: bool = False):
         """Initialize a ConfusionMatrix instance.

         Args:
ultralytics/utils/plotting.py
CHANGED
@@ -972,6 +972,9 @@ def plot_tune_results(csv_file: str = "tune_results.csv", exclude_zero_fitness_p
     if exclude_zero_fitness_points:
         mask = fitness > 0  # exclude zero-fitness points
         x, fitness = x[mask], fitness[mask]
+    if len(fitness) == 0:
+        LOGGER.warning("No valid fitness values to plot (all iterations may have failed)")
+        return
     # Iterative sigma rejection on lower bound only
     for _ in range(3):  # max 3 iterations
         mean, std = fitness.mean(), fitness.std()
ultralytics/utils/tal.py
CHANGED
@@ -354,7 +354,8 @@ def make_anchors(feats, strides, grid_cell_offset=0.5):
     anchor_points, stride_tensor = [], []
     assert feats is not None
     dtype, device = feats[0].dtype, feats[0].device
-    for i
+    for i in range(len(feats)):  # use len(feats) to avoid TracerWarning from iterating over strides tensor
+        stride = strides[i]
         h, w = feats[i].shape[2:] if isinstance(feats, list) else (int(feats[i][0]), int(feats[i][1]))
         sx = torch.arange(end=w, device=device, dtype=dtype) + grid_cell_offset  # shift x
         sy = torch.arange(end=h, device=device, dtype=dtype) + grid_cell_offset  # shift y
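Indexing strides by position rather than iterating the tensor keeps torch.jit.trace/export from emitting a TracerWarning. A minimal sketch of the rewritten loop with dummy feature maps:

import torch

feats = [torch.zeros(1, 64, 8, 8), torch.zeros(1, 64, 4, 4)]  # dummy P3/P4-style feature maps
strides = torch.tensor([8.0, 16.0])
for i in range(len(feats)):  # avoids iterating over the strides tensor directly
    stride = strides[i]
    h, w = feats[i].shape[2:]
    print(i, stride.item(), (h, w))  # 0 8.0 (8, 8) then 1 16.0 (4, 4)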
ultralytics/utils/torch_utils.py
CHANGED
@@ -157,7 +157,7 @@ def select_device(device="", newline=False, verbose=True):
     Notes:
         Sets the 'CUDA_VISIBLE_DEVICES' environment variable for specifying which GPUs to use.
     """
-    if isinstance(device, torch.device) or str(device).startswith(("tpu", "intel")):
+    if isinstance(device, torch.device) or str(device).startswith(("tpu", "intel", "vulkan")):
         return device

     s = f"Ultralytics {__version__} 🚀 Python-{PYTHON_VERSION} torch-{TORCH_VERSION} "
ultralytics/utils/tqdm.py
CHANGED
@@ -317,7 +317,10 @@ class TQDM:
         # Final display
         if self.total and self.n >= self.total:
             self.n = self.total
-
+            if self.n != self.last_print_n:  # Skip if 100% already shown
+                self._display(final=True)
+        else:
+            self._display(final=True)

         # Cleanup
         if self.leave:
ultralytics/utils/tuner.py
CHANGED
@@ -35,9 +35,6 @@ def run_ray_tune(
         >>> result_grid = model.tune(data="coco8.yaml", use_ray=True)
     """
     LOGGER.info("💡 Learn about RayTune at https://docs.ultralytics.com/integrations/ray-tune")
-    if train_args is None:
-        train_args = {}
-
     try:
         checks.check_requirements("ray[tune]")

{ultralytics_opencv_headless-8.3.248.dist-info → ultralytics_opencv_headless-8.3.253.dist-info}/METADATA
RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ultralytics-opencv-headless
-Version: 8.3.248
+Version: 8.3.253
 Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>

{ultralytics_opencv_headless-8.3.248.dist-info → ultralytics_opencv_headless-8.3.253.dist-info}/RECORD
RENAMED
@@ -7,11 +7,11 @@ tests/test_exports.py,sha256=5G5EgDmars6d-N7TVnJdDFWId0IJs-yw03DvdQIjrNU,14246
 tests/test_integrations.py,sha256=6QgSh9n0J04RdUYz08VeVOnKmf4S5MDEQ0chzS7jo_c,6220
 tests/test_python.py,sha256=viMvRajIbDZdm64hRRg9i8qZ1sU9frwB69e56mxwEXk,29266
 tests/test_solutions.py,sha256=CIaphpmOXgz9AE9xcm1RWODKrwGfZLCc84IggGXArNM,14122
-ultralytics/__init__.py,sha256=
+ultralytics/__init__.py,sha256=cqnBNS4T-JWe8DwcLEN_GwX_fpL1gxiWwY8LjwC8sEo,1302
 ultralytics/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
 ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
 ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
-ultralytics/cfg/__init__.py,sha256=
+ultralytics/cfg/__init__.py,sha256=sJfreQYmFkCaW9eWex-Um1tG-4zRpC2Q7GuJAWBrFpk,40401
 ultralytics/cfg/default.yaml,sha256=KKENSHolDSto1HJVGjBvTXvz9ae-XMcYRzKrjU3QfZc,8912
 ultralytics/cfg/datasets/Argoverse.yaml,sha256=QGpdh3Hj5dFrvbsaE_8rAVj9BO4XpKTB7uhXaTTnE-o,3364
 ultralytics/cfg/datasets/DOTAv1.5.yaml,sha256=KE7VC-ZMDSei1pLPm-pdk_ZAMRU_gLwGgtIQNbwp6dA,1212
@@ -21,6 +21,7 @@ ultralytics/cfg/datasets/HomeObjects-3K.yaml,sha256=xEtSqEad-rtfGuIrERjjhdISggmP
 ultralytics/cfg/datasets/ImageNet.yaml,sha256=N9NHhIgnlNIBqZZbzQZAW3aCnz6RSXQABnopaDs5BmE,42529
 ultralytics/cfg/datasets/Objects365.yaml,sha256=8Bl-NAm0mlMW8EfMsz39JZo-HCvmp0ejJXaMeoHTpqw,9649
 ultralytics/cfg/datasets/SKU-110K.yaml,sha256=xvRkq3SdDOwBA91U85bln7HTXkod5MvFX6pt1PxTjJE,2609
+ultralytics/cfg/datasets/TT100K.yaml,sha256=qrJ6nrZdvrMy5ov9FaHn-pFI8hJn_WLYaB60vhtCOxs,6918
 ultralytics/cfg/datasets/VOC.yaml,sha256=XpaegRHjp7xZnenOuA9zgg2lQURSL-o7mLQwzIKKuqM,3803
 ultralytics/cfg/datasets/VisDrone.yaml,sha256=PfudojW5av_5q-dC9VsG_xhvuv9cTGEpRp4loXCJ4Ng,3397
 ultralytics/cfg/datasets/african-wildlife.yaml,sha256=6UfO_gnwJEDVq05p72IMJfkTIKZlXKNLSeKru-JyTrQ,915
@@ -122,12 +123,12 @@ ultralytics/data/scripts/get_coco.sh,sha256=UuJpJeo3qQpTHVINeOpmP0NYmg8PhEFE3A8J
 ultralytics/data/scripts/get_coco128.sh,sha256=qmRQl_hOKrsdHrTrnyQuFIH01oDz3lfaz138OgGfLt8,650
 ultralytics/data/scripts/get_imagenet.sh,sha256=hr42H16bM47iT27rgS7MpEo-GeOZAYUQXgr0B2cwn48,1705
 ultralytics/engine/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
-ultralytics/engine/exporter.py,sha256=
-ultralytics/engine/model.py,sha256=
+ultralytics/engine/exporter.py,sha256=Lvs2vHcBP7YeHxYhyD7dvEshnNeYn5IzRHhdA7VRfbY,72997
+ultralytics/engine/model.py,sha256=1Ex0Q7XOwWWtTsTMk-7O4wWiA2cYGayKJwB3zDC1XTg,53223
 ultralytics/engine/predictor.py,sha256=neYmNDX27Vv3ggk9xqaKlH6XzB2vlFIghU5o7ZC0zFo,22838
 ultralytics/engine/results.py,sha256=DomI01voqR_i7v8LhDGb6jWCprWB4H6I436GSO2NMBY,68030
-ultralytics/engine/trainer.py,sha256=
-ultralytics/engine/tuner.py,sha256=
+ultralytics/engine/trainer.py,sha256=riVwjf_4uhrkH5TYjAvRQmIerNT7pxPBM8jWA60oF-A,45851
+ultralytics/engine/tuner.py,sha256=xZGIYwpQVdnzQcdEmLc70eQy7G7swQQEgdDGxoBLmHY,21570
 ultralytics/engine/validator.py,sha256=2rqdVt4hB9ruMJq-L7PbaCNFwuERS7ZHdVSg91RM3wk,17761
 ultralytics/hub/__init__.py,sha256=Z0K_E00jzQh90b18q3IDChwVmTvyIYp6C00sCV-n2F8,6709
 ultralytics/hub/auth.py,sha256=ANzCeZA7lUzTWc_sFHbDuuyBh1jLl2sTpHkoUbIkFYE,6254
@@ -209,7 +210,7 @@ ultralytics/models/yolo/yoloe/train.py,sha256=giX6zDu5Z3z48PCaBHzu7v9NH3BrpUaGAY
 ultralytics/models/yolo/yoloe/train_seg.py,sha256=0hRByMXsEJA-J2B1wXDMVhiW9f9MOTj3LlrGTibN6Ww,4919
 ultralytics/models/yolo/yoloe/val.py,sha256=utUFWeFKRFWZrPr1y3A8ztbTwdoWMYqzlwBN7CQ0tCA,9418
 ultralytics/nn/__init__.py,sha256=538LZPUKKvc3JCMgiQ4VLGqRN2ZAaVLFcQbeNNHFkEA,545
-ultralytics/nn/autobackend.py,sha256=
+ultralytics/nn/autobackend.py,sha256=NOp-hhkx1V-I6JgjloNZYek_kMGdPhVyiWHbcU0J2qI,45135
 ultralytics/nn/tasks.py,sha256=nHhP3R8r17K_pHSfGXwDAPEwUyV0sbqzkSHjeZ2PRkg,70418
 ultralytics/nn/text_model.py,sha256=novnuosqXnW1NmlOzWOk7dEKuN6Vq40CTksr6hI3Knc,15109
 ultralytics/nn/modules/__init__.py,sha256=5Sg_28MDfKwdu14Ty_WCaiIXZyjBSQ-xCNCwnoz_w-w,3198
@@ -227,7 +228,7 @@ ultralytics/solutions/distance_calculation.py,sha256=RcpRDodEHAJUug9tobtQKt5_byS
 ultralytics/solutions/heatmap.py,sha256=DUyV5UFsOwZ8ArN4BtW8Vm3ps8_VZXc6VP0uiKyGDWY,5481
 ultralytics/solutions/instance_segmentation.py,sha256=eggk1uWCZ-6cp0YfxCGVUwnKS6xqJua946oxafjAXGk,3778
 ultralytics/solutions/object_blurrer.py,sha256=EZrv3oU68kEaahAxlhk9cF5ZKFtoVaW8bDB4Css9xe0,3981
-ultralytics/solutions/object_counter.py,sha256=
+ultralytics/solutions/object_counter.py,sha256=OpMSLlenDK-cLvCgCOoKbqMXIZrngyqP8DP6ZeEnWL8,9355
 ultralytics/solutions/object_cropper.py,sha256=WRbrfXAR5aD6PQBqJ-BvcVaiaqta_9YeTlXN2dY274s,3510
 ultralytics/solutions/parking_management.py,sha256=FQKeLEiwnTmRcXqsNOlOt9GTFPjkyvnE5pwwKnneJa4,13770
 ultralytics/solutions/queue_management.py,sha256=NlVX6PMEaffjoZjfQrVyayaDUdtc0JF8GzTQrZFjpCg,4371
@@ -252,8 +253,8 @@ ultralytics/trackers/utils/matching.py,sha256=x6uZOIx0O9oVmAcfY6tYMTJQE2cDTUlRR6
 ultralytics/utils/__init__.py,sha256=JfvODTB4mG_JOhTeCiPtq0iCEgiCh14hJf195rnOhLQ,55145
 ultralytics/utils/autobatch.py,sha256=jiE4m_--H9UkXFDm_FqzcZk_hSTCGpS72XdVEKgZwAo,5114
 ultralytics/utils/autodevice.py,sha256=rXlPuo-iX-vZ4BabmMGEGh9Uxpau4R7Zlt1KCo9Xfyc,8892
-ultralytics/utils/benchmarks.py,sha256=
-ultralytics/utils/checks.py,sha256=
+ultralytics/utils/benchmarks.py,sha256=KOFm2AZPehrJajbUu6NTdZoVOFjTpLhUUnfL59sC60w,32293
+ultralytics/utils/checks.py,sha256=DheB1ip9ba7ZW_fjPieNdx98vZpwUDbnCKmavAIzJL4,39411
 ultralytics/utils/cpu.py,sha256=OksKOlX93AsbSsFuoYvLXRXgpkOibrZSwQyW6lipt4Q,3493
 ultralytics/utils/dist.py,sha256=hOuY1-unhQAY-uWiZw3LWw36d1mqJuYK75NdlwB4oKE,4131
 ultralytics/utils/downloads.py,sha256=IyiGjjXqOyf1B0qLMk7vE6sSQ8s232OhKS8aj9XbTgs,22883
@@ -264,16 +265,16 @@ ultralytics/utils/git.py,sha256=UdqeIiiEzg1qkerAZrg5YtTYPuJYwrpxW9N_6Pq6s8U,5501
 ultralytics/utils/instance.py,sha256=11mhefvTI9ftMqSirXuiViAi0Fxlo6v84qvNxfRNUoE,18862
 ultralytics/utils/logger.py,sha256=T5iaNnaqbCvx_FZf1dhVkr5FVxyxb4vO17t4SJfCIhg,19132
 ultralytics/utils/loss.py,sha256=t-z7qkvqF8OtuRHrj2wmvClZV2CCumIRi9jnqkc9i_A,39573
-ultralytics/utils/metrics.py,sha256=
+ultralytics/utils/metrics.py,sha256=SpyMGnuRwwmorJqSdUsDQquVpGmgfj1X3PNDiw_ZZWM,69152
 ultralytics/utils/nms.py,sha256=zv1rOzMF6WU8Kdk41VzNf1H1EMt_vZHcbDFbg3mnN2o,14248
 ultralytics/utils/ops.py,sha256=nWvTLJSBeW_XrxCy5Ytxl7sZJHp2sRqyCv4mm8QwYnw,25797
 ultralytics/utils/patches.py,sha256=mD3slAMAhcezzP42_fOWmacNMU6zXB68Br4_EBCyIjs,7117
-ultralytics/utils/plotting.py,sha256=
-ultralytics/utils/tal.py,sha256=
-ultralytics/utils/torch_utils.py,sha256=
-ultralytics/utils/tqdm.py,sha256=
+ultralytics/utils/plotting.py,sha256=_iXs4gs8tzMSgiKxCriD4un-MJkOsC3lGSy0wn7qZGk,48433
+ultralytics/utils/tal.py,sha256=iabLTij-MVyKxrkwhIOC1ouRB5Iy80Zp5H8aoYjvJJY,20773
+ultralytics/utils/torch_utils.py,sha256=dHvLaQopIOr9NcIWkLWPX36f5OAFR4thcqm379Zayfc,40278
+ultralytics/utils/tqdm.py,sha256=f2W608Qpvgu6tFi28qylaZpcRv3IX8wTGY_8lgicaqY,16343
 ultralytics/utils/triton.py,sha256=BQu3CD3OlT76d1OtmnX5slQU37VC1kzRvEtfI2saIQA,5211
-ultralytics/utils/tuner.py,sha256=
+ultralytics/utils/tuner.py,sha256=1PM7G89X95Yfmhskk8LBXU8T-Bfiln1Ajbnz2lkgvAI,7303
 ultralytics/utils/callbacks/__init__.py,sha256=hzL63Rce6VkZhP4Lcim9LKjadixaQG86nKqPhk7IkS0,242
 ultralytics/utils/callbacks/base.py,sha256=floD31JHqHpiVabQiE76_hzC_j7KjtL4w_czkD1bLKc,6883
 ultralytics/utils/callbacks/clearml.py,sha256=LjfNe4mswceCOpEGVLxqGXjkl_XGbef4awdcp4502RU,5831
@@ -282,17 +283,17 @@ ultralytics/utils/callbacks/dvc.py,sha256=YT0Sa5P8Huj8Fn9jM2P6MYzUY3PIVxsa5BInVi
 ultralytics/utils/callbacks/hub.py,sha256=fVLqqr3ZM6hoYFlVMEeejfq1MWDrkWCskPFOG3HGILQ,4159
 ultralytics/utils/callbacks/mlflow.py,sha256=wCXjQgdufp9LYujqMzLZOmIOur6kvrApHNeo9dA7t_g,5323
 ultralytics/utils/callbacks/neptune.py,sha256=_vt3cMwDHCR-LyT3KtRikGpj6AG11oQ-skUUUUdZ74o,4391
-ultralytics/utils/callbacks/platform.py,sha256=
+ultralytics/utils/callbacks/platform.py,sha256=eFPP5vgwGhGb0lHbJgaU24JDz8l6vEO9qQuzUIYhSsU,15977
 ultralytics/utils/callbacks/raytune.py,sha256=Y0dFyNZVRuFovSh7nkgUIHTQL3xIXOACElgHuYbg_5I,1278
 ultralytics/utils/callbacks/tensorboard.py,sha256=PTJYvD2gqRUN8xw5VoTjvKnu2adukLfvhMlDgTnTiFU,4952
 ultralytics/utils/callbacks/wb.py,sha256=ghmL3gigOa-z_F54-TzMraKw9MAaYX-Wk4H8dLoRvX8,7705
 ultralytics/utils/export/__init__.py,sha256=Cfh-PwVfTF_lwPp-Ss4wiX4z8Sm1XRPklsqdFfmTZ30,333
 ultralytics/utils/export/engine.py,sha256=23-lC6dNsmz5vprSJzaN7UGNXrFlVedNcqhlOH_IXes,9956
-ultralytics/utils/export/imx.py,sha256=
+ultralytics/utils/export/imx.py,sha256=0TNooKXzMagOMQxGxj90kEOAHrycQNNSLMdRQH-SJ30,13299
 ultralytics/utils/export/tensorflow.py,sha256=igYzwbdblb9YgfV4Jgl5lMvynuVRcF51dAzI7j-BBI0,9966
-ultralytics_opencv_headless-8.3.
-ultralytics_opencv_headless-8.3.
-ultralytics_opencv_headless-8.3.
-ultralytics_opencv_headless-8.3.
-ultralytics_opencv_headless-8.3.
-ultralytics_opencv_headless-8.3.
+ultralytics_opencv_headless-8.3.253.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ultralytics_opencv_headless-8.3.253.dist-info/METADATA,sha256=AGSNDwYCFqjotUhh5jv3QD8wixw3FG_8FRKtw9b4o_k,37728
+ultralytics_opencv_headless-8.3.253.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ultralytics_opencv_headless-8.3.253.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ultralytics_opencv_headless-8.3.253.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ultralytics_opencv_headless-8.3.253.dist-info/RECORD,,
File without changes
File without changes
File without changes
File without changes