ultralytics-opencv-headless 8.3.248__py3-none-any.whl → 8.3.251__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ultralytics/__init__.py +1 -1
- ultralytics/cfg/__init__.py +6 -4
- ultralytics/cfg/datasets/TT100K.yaml +346 -0
- ultralytics/engine/model.py +4 -3
- ultralytics/engine/trainer.py +28 -25
- ultralytics/engine/tuner.py +1 -0
- ultralytics/solutions/object_counter.py +1 -1
- ultralytics/utils/benchmarks.py +1 -1
- ultralytics/utils/callbacks/platform.py +142 -55
- ultralytics/utils/checks.py +22 -3
- ultralytics/utils/metrics.py +1 -1
- ultralytics/utils/plotting.py +3 -0
- ultralytics/utils/tuner.py +0 -3
- {ultralytics_opencv_headless-8.3.248.dist-info → ultralytics_opencv_headless-8.3.251.dist-info}/METADATA +1 -1
- {ultralytics_opencv_headless-8.3.248.dist-info → ultralytics_opencv_headless-8.3.251.dist-info}/RECORD +19 -18
- {ultralytics_opencv_headless-8.3.248.dist-info → ultralytics_opencv_headless-8.3.251.dist-info}/WHEEL +0 -0
- {ultralytics_opencv_headless-8.3.248.dist-info → ultralytics_opencv_headless-8.3.251.dist-info}/entry_points.txt +0 -0
- {ultralytics_opencv_headless-8.3.248.dist-info → ultralytics_opencv_headless-8.3.251.dist-info}/licenses/LICENSE +0 -0
- {ultralytics_opencv_headless-8.3.248.dist-info → ultralytics_opencv_headless-8.3.251.dist-info}/top_level.txt +0 -0
ultralytics/__init__.py
CHANGED
ultralytics/cfg/__init__.py
CHANGED
@@ -410,9 +410,11 @@ def get_save_dir(args: SimpleNamespace, name: str | None = None) -> Path:
     else:
         from ultralytics.utils.files import increment_path

-
+        runs = (ROOT.parent / "tests/tmp/runs" if TESTS_RUNNING else RUNS_DIR) / args.task
+        nested = args.project and len(Path(args.project).parts) > 1  # e.g. "user/project" or "org\repo"
+        project = runs / args.project if nested else args.project or runs
         name = name or args.name or f"{args.mode}"
-        save_dir = increment_path(Path(project) / name, exist_ok=args.exist_ok if RANK in {-1, 0} else True)
+        save_dir = increment_path(Path(project) / name, exist_ok=args.exist_ok if RANK in {-1, 0} else True, mkdir=True)

     return Path(save_dir).resolve()  # resolve to display full path in console

@@ -725,8 +727,8 @@ def handle_yolo_solutions(args: list[str]) -> None:
        )
        if solution_name == "analytics":  # analytical graphs follow fixed shape for output i.e w=1920, h=1080
            w, h = 1280, 720
-       save_dir = get_save_dir(SimpleNamespace(
-       save_dir.mkdir(parents=True)  # create the output directory i.e. runs/solutions/exp
+       save_dir = get_save_dir(SimpleNamespace(task="solutions", name="exp", exist_ok=False, project=None))
+       save_dir.mkdir(parents=True, exist_ok=True)  # create the output directory i.e. runs/solutions/exp
        vw = cv2.VideoWriter(str(save_dir / f"{solution_name}.avi"), cv2.VideoWriter_fourcc(*"mp4v"), fps, (w, h))

    try:  # Process video frames
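For illustration, a minimal sketch of how the new nested-project handling above resolves an output directory; the project and name values and the `runs` root below are hypothetical stand-ins for the real RUNS_DIR and arguments.

    from pathlib import Path
    from types import SimpleNamespace

    # Re-creates the nested-project logic from the hunk above in isolation (illustration only).
    runs = Path("runs") / "detect"  # assumes RUNS_DIR="runs" and task="detect"
    args = SimpleNamespace(project="team/experiments", name="exp", mode="train")
    nested = args.project and len(Path(args.project).parts) > 1  # "team/experiments" -> nested
    project = runs / args.project if nested else args.project or runs
    print(project / args.name)  # runs/detect/team/experiments/exp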
ultralytics/cfg/datasets/TT100K.yaml
ADDED

@@ -0,0 +1,346 @@
+# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+# Tsinghua-Tencent 100K (TT100K) dataset https://cg.cs.tsinghua.edu.cn/traffic-sign/ by Tsinghua University
+# Documentation: https://cg.cs.tsinghua.edu.cn/traffic-sign/tutorial.html
+# Paper: Traffic-Sign Detection and Classification in the Wild (CVPR 2016)
+# License: CC BY-NC 2.0 license for non-commercial use only
+# Example usage: yolo train data=TT100K.yaml
+# parent
+# ├── ultralytics
+# └── datasets
+#     └── TT100K ← downloads here (~18 GB)
+
+# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..]
+path: TT100K # dataset root dir
+train: images/train # train images (relative to 'path') 6105 images
+val: images/val # val images (relative to 'path') 7641 images (original 'other' split)
+test: images/test # test images (relative to 'path') 3071 images
+
+# Classes (221 traffic sign categories, 45 with sufficient training instances)
+names:
+  0: pl5
+  1: pl10
+  2: pl15
+  3: pl20
+  4: pl25
+  5: pl30
+  6: pl40
+  7: pl50
+  8: pl60
+  9: pl70
+  10: pl80
+  11: pl90
+  12: pl100
+  13: pl110
+  14: pl120
+  15: pm5
+  16: pm10
+  17: pm13
+  18: pm15
+  19: pm20
+  20: pm25
+  21: pm30
+  22: pm35
+  23: pm40
+  24: pm46
+  25: pm50
+  26: pm55
+  27: pm8
+  28: pn
+  29: pne
+  30: ph4
+  31: ph4.5
+  32: ph5
+  33: ps
+  34: pg
+  35: ph1.5
+  36: ph2
+  37: ph2.1
+  38: ph2.2
+  39: ph2.4
+  40: ph2.5
+  41: ph2.8
+  42: ph2.9
+  43: ph3
+  44: ph3.2
+  45: ph3.5
+  46: ph3.8
+  47: ph4.2
+  48: ph4.3
+  49: ph4.8
+  50: ph5.3
+  51: ph5.5
+  52: pb
+  53: pr10
+  54: pr100
+  55: pr20
+  56: pr30
+  57: pr40
+  58: pr45
+  59: pr50
+  60: pr60
+  61: pr70
+  62: pr80
+  63: pr90
+  64: p1
+  65: p2
+  66: p3
+  67: p4
+  68: p5
+  69: p6
+  70: p7
+  71: p8
+  72: p9
+  73: p10
+  74: p11
+  75: p12
+  76: p13
+  77: p14
+  78: p15
+  79: p16
+  80: p17
+  81: p18
+  82: p19
+  83: p20
+  84: p21
+  85: p22
+  86: p23
+  87: p24
+  88: p25
+  89: p26
+  90: p27
+  91: p28
+  92: pa8
+  93: pa10
+  94: pa12
+  95: pa13
+  96: pa14
+  97: pb5
+  98: pc
+  99: pg
+  100: ph1
+  101: ph1.3
+  102: ph1.5
+  103: ph2
+  104: ph3
+  105: ph4
+  106: ph5
+  107: pi
+  108: pl0
+  109: pl4
+  110: pl5
+  111: pl8
+  112: pl10
+  113: pl15
+  114: pl20
+  115: pl25
+  116: pl30
+  117: pl35
+  118: pl40
+  119: pl50
+  120: pl60
+  121: pl65
+  122: pl70
+  123: pl80
+  124: pl90
+  125: pl100
+  126: pl110
+  127: pl120
+  128: pm2
+  129: pm8
+  130: pm10
+  131: pm13
+  132: pm15
+  133: pm20
+  134: pm25
+  135: pm30
+  136: pm35
+  137: pm40
+  138: pm46
+  139: pm50
+  140: pm55
+  141: pn
+  142: pne
+  143: po
+  144: pr10
+  145: pr100
+  146: pr20
+  147: pr30
+  148: pr40
+  149: pr45
+  150: pr50
+  151: pr60
+  152: pr70
+  153: pr80
+  154: ps
+  155: w1
+  156: w2
+  157: w3
+  158: w5
+  159: w8
+  160: w10
+  161: w12
+  162: w13
+  163: w16
+  164: w18
+  165: w20
+  166: w21
+  167: w22
+  168: w24
+  169: w28
+  170: w30
+  171: w31
+  172: w32
+  173: w34
+  174: w35
+  175: w37
+  176: w38
+  177: w41
+  178: w42
+  179: w43
+  180: w44
+  181: w45
+  182: w46
+  183: w47
+  184: w48
+  185: w49
+  186: w50
+  187: w51
+  188: w52
+  189: w53
+  190: w54
+  191: w55
+  192: w56
+  193: w57
+  194: w58
+  195: w59
+  196: w60
+  197: w62
+  198: w63
+  199: w66
+  200: i1
+  201: i2
+  202: i3
+  203: i4
+  204: i5
+  205: i6
+  206: i7
+  207: i8
+  208: i9
+  209: i10
+  210: i11
+  211: i12
+  212: i13
+  213: i14
+  214: i15
+  215: il60
+  216: il80
+  217: il100
+  218: il110
+  219: io
+  220: ip
+
+# Download script/URL (optional) ---------------------------------------------------------------------------------------
+download: |
+  import json
+  import shutil
+  from pathlib import Path
+
+  from PIL import Image
+
+  from ultralytics.utils import TQDM
+  from ultralytics.utils.downloads import download
+
+
+  def tt100k2yolo(dir):
+      """Convert TT100K annotations to YOLO format with images/{split} and labels/{split} structure."""
+      data_dir = dir / "data"
+      anno_file = data_dir / "annotations.json"
+
+      print("Loading annotations...")
+      with open(anno_file, encoding="utf-8") as f:
+          data = json.load(f)
+
+      # Build class name to index mapping from yaml
+      names = yaml["names"]
+      class_to_idx = {v: k for k, v in names.items()}
+
+      # Create directories
+      for split in ["train", "val", "test"]:
+          (dir / "images" / split).mkdir(parents=True, exist_ok=True)
+          (dir / "labels" / split).mkdir(parents=True, exist_ok=True)
+
+      print("Converting annotations to YOLO format...")
+      skipped = 0
+      for img_id, img_data in TQDM(data["imgs"].items(), desc="Processing"):
+          img_path_str = img_data["path"]
+          if "train" in img_path_str:
+              split = "train"
+          elif "test" in img_path_str:
+              split = "test"
+          else:
+              split = "val"
+
+          # Source and destination paths
+          src_img = data_dir / img_path_str
+          if not src_img.exists():
+              continue
+
+          dst_img = dir / "images" / split / src_img.name
+
+          # Get image dimensions
+          try:
+              with Image.open(src_img) as img:
+                  img_width, img_height = img.size
+          except Exception as e:
+              print(f"Error reading {src_img}: {e}")
+              continue
+
+          # Copy image to destination
+          shutil.copy2(src_img, dst_img)
+
+          # Convert annotations
+          label_file = dir / "labels" / split / f"{src_img.stem}.txt"
+          lines = []
+
+          for obj in img_data.get("objects", []):
+              category = obj["category"]
+              if category not in class_to_idx:
+                  skipped += 1
+                  continue
+
+              bbox = obj["bbox"]
+              xmin, ymin = bbox["xmin"], bbox["ymin"]
+              xmax, ymax = bbox["xmax"], bbox["ymax"]
+
+              # Convert to YOLO format (normalized center coordinates and dimensions)
+              x_center = ((xmin + xmax) / 2.0) / img_width
+              y_center = ((ymin + ymax) / 2.0) / img_height
+              width = (xmax - xmin) / img_width
+              height = (ymax - ymin) / img_height
+
+              # Clip to valid range
+              x_center = max(0, min(1, x_center))
+              y_center = max(0, min(1, y_center))
+              width = max(0, min(1, width))
+              height = max(0, min(1, height))
+
+              cls_idx = class_to_idx[category]
+              lines.append(f"{cls_idx} {x_center:.6f} {y_center:.6f} {width:.6f} {height:.6f}\n")
+
+          # Write label file
+          if lines:
+              label_file.write_text("".join(lines), encoding="utf-8")
+
+      if skipped:
+          print(f"Skipped {skipped} annotations with unknown categories")
+      print("Conversion complete!")
+
+
+  # Download
+  dir = Path(yaml["path"])  # dataset root dir
+  urls = ["https://cg.cs.tsinghua.edu.cn/traffic-sign/data_model_code/data.zip"]
+  download(urls, dir=dir, curl=True, threads=1)
+
+  # Convert
+  tt100k2yolo(dir)
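As a usage note for the new dataset config, the `yolo train data=TT100K.yaml` example in its header has a Python-API equivalent; the model file, epochs, and image size below are arbitrary choices, not prescribed by the diff.

    from ultralytics import YOLO

    # First use downloads TT100K (~18 GB) and runs the embedded conversion script.
    model = YOLO("yolo11n.pt")  # any detection model; yolo11n.pt chosen arbitrarily
    model.train(data="TT100K.yaml", epochs=100, imgsz=640)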
ultralytics/engine/model.py
CHANGED
@@ -275,7 +275,7 @@ class Model(torch.nn.Module):
            >>> model._load("yolo11n.pt")
            >>> model._load("path/to/weights.pth", task="detect")
        """
-        if weights.lower().startswith(("https://", "http://", "rtsp://", "rtmp://", "tcp://")):
+        if weights.lower().startswith(("https://", "http://", "rtsp://", "rtmp://", "tcp://", "ul://")):
            weights = checks.check_file(weights, download_dir=SETTINGS["weights_dir"])  # download and return local file
        weights = checks.check_model_file_from_stem(weights)  # add suffix, i.e. yolo11n -> yolo11n.pt

@@ -403,7 +403,7 @@ class Model(torch.nn.Module):
        }
        torch.save({**self.ckpt, **updates}, filename)

-    def info(self, detailed: bool = False, verbose: bool = True):
+    def info(self, detailed: bool = False, verbose: bool = True, imgsz: int | list[int, int] = 640):
        """Display model information.

        This method provides an overview or detailed information about the model, depending on the arguments

@@ -412,6 +412,7 @@ class Model(torch.nn.Module):
        Args:
            detailed (bool): If True, shows detailed information about the model layers and parameters.
            verbose (bool): If True, prints the information. If False, returns the information as a list.
+            imgsz (int | list[int, int]): Input image size used for FLOPs calculation.

        Returns:
            (list[str]): A list of strings containing various types of information about the model, including model

@@ -423,7 +424,7 @@ class Model(torch.nn.Module):
            >>> info_list = model.info(detailed=True, verbose=False)  # Returns detailed info as a list
        """
        self._check_is_pytorch_model()
-        return self.model.info(detailed=detailed, verbose=verbose)
+        return self.model.info(detailed=detailed, verbose=verbose, imgsz=imgsz)

    def fuse(self) -> None:
        """Fuse Conv2d and BatchNorm2d layers in the model for optimized inference.
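A short sketch of the new `imgsz` argument on `Model.info()`, which lets FLOPs be reported for a non-default input size; the model and sizes are arbitrary.

    from ultralytics import YOLO

    model = YOLO("yolo11n.pt")
    model.info()           # parameters and GFLOPs at the default imgsz=640
    model.info(imgsz=320)  # GFLOPs recomputed for a 320x320 input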
ultralytics/engine/trainer.py
CHANGED
@@ -157,6 +157,27 @@ class BaseTrainer:
        if self.device.type in {"cpu", "mps"}:
            self.args.workers = 0  # faster CPU training as time dominated by inference, not dataloading

+        # Callbacks - initialize early so on_pretrain_routine_start can capture original args.data
+        self.callbacks = _callbacks or callbacks.get_default_callbacks()
+
+        if isinstance(self.args.device, str) and len(self.args.device):  # i.e. device='0' or device='0,1,2,3'
+            world_size = len(self.args.device.split(","))
+        elif isinstance(self.args.device, (tuple, list)):  # i.e. device=[0, 1, 2, 3] (multi-GPU from CLI is list)
+            world_size = len(self.args.device)
+        elif self.args.device in {"cpu", "mps"}:  # i.e. device='cpu' or 'mps'
+            world_size = 0
+        elif torch.cuda.is_available():  # i.e. device=None or device='' or device=number
+            world_size = 1  # default to device 0
+        else:  # i.e. device=None or device=''
+            world_size = 0
+
+        self.ddp = world_size > 1 and "LOCAL_RANK" not in os.environ
+        self.world_size = world_size
+        # Run on_pretrain_routine_start before get_dataset() to capture original args.data (e.g., ul:// URIs)
+        if RANK in {-1, 0} and not self.ddp:
+            callbacks.add_integration_callbacks(self)
+            self.run_callbacks("on_pretrain_routine_start")
+
        # Model and Dataset
        self.model = check_model_file_from_stem(self.args.model)  # add suffix, i.e. yolo11n -> yolo11n.pt
        with torch_distributed_zero_first(LOCAL_RANK):  # avoid auto-downloading dataset multiple times

@@ -180,28 +201,6 @@ class BaseTrainer:
        self.plot_idx = [0, 1, 2]
        self.nan_recovery_attempts = 0

-        # Callbacks
-        self.callbacks = _callbacks or callbacks.get_default_callbacks()
-
-        if isinstance(self.args.device, str) and len(self.args.device):  # i.e. device='0' or device='0,1,2,3'
-            world_size = len(self.args.device.split(","))
-        elif isinstance(self.args.device, (tuple, list)):  # i.e. device=[0, 1, 2, 3] (multi-GPU from CLI is list)
-            world_size = len(self.args.device)
-        elif self.args.device in {"cpu", "mps"}:  # i.e. device='cpu' or 'mps'
-            world_size = 0
-        elif torch.cuda.is_available():  # i.e. device=None or device='' or device=number
-            world_size = 1  # default to device 0
-        else:  # i.e. device=None or device=''
-            world_size = 0
-
-        self.ddp = world_size > 1 and "LOCAL_RANK" not in os.environ
-        self.world_size = world_size
-        # Run subprocess if DDP training, else train normally
-        if RANK in {-1, 0} and not self.ddp:
-            callbacks.add_integration_callbacks(self)
-            # Start console logging immediately at trainer initialization
-            self.run_callbacks("on_pretrain_routine_start")
-
    def add_callback(self, event: str, callback):
        """Append the given callback to the event's callback list."""
        self.callbacks[event].append(callback)

@@ -631,13 +630,17 @@
        try:
            if self.args.task == "classify":
                data = check_cls_dataset(self.args.data)
-            elif str(self.args.data).rsplit(".", 1)[-1] == "ndjson"
-
+            elif str(self.args.data).rsplit(".", 1)[-1] == "ndjson" or (
+                str(self.args.data).startswith("ul://") and "/datasets/" in str(self.args.data)
+            ):
+                # Convert NDJSON to YOLO format (including ul:// platform dataset URIs)
                import asyncio

                from ultralytics.data.converter import convert_ndjson_to_yolo
+                from ultralytics.utils.checks import check_file

-
+                ndjson_file = check_file(self.args.data)  # Resolve ul:// or URL to local .ndjson file
+                yaml_path = asyncio.run(convert_ndjson_to_yolo(ndjson_file))
                self.args.data = str(yaml_path)
                data = check_det_dataset(self.args.data)
            elif str(self.args.data).rsplit(".", 1)[-1] in {"yaml", "yml"} or self.args.task in {
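A hedged sketch of what the widened branch enables: training directly from a platform dataset URI, which is resolved to an NDJSON file via `check_file` and converted to YOLO format before `check_det_dataset` runs. The username and dataset slug are placeholders, and a valid `ULTRALYTICS_API_KEY` is assumed.

    import os

    from ultralytics import YOLO

    os.environ.setdefault("ULTRALYTICS_API_KEY", "<your-api-key>")  # placeholder key

    # Placeholder URI of the form ul://<username>/datasets/<slug>
    model = YOLO("yolo11n.pt")
    model.train(data="ul://username/datasets/traffic-signs", epochs=50)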
ultralytics/engine/tuner.py
CHANGED
@@ -378,6 +378,7 @@ class Tuner:
            metrics = {}
            train_args = {**vars(self.args), **mutated_hyp}
            save_dir = get_save_dir(get_cfg(train_args))
+            train_args["save_dir"] = str(save_dir)  # pass save_dir to subprocess to ensure same path is used
            weights_dir = save_dir / "weights"
            try:
                # Train YOLO model with mutated hyperparameters (run in subprocess to avoid dataloader hang)
ultralytics/solutions/object_counter.py
CHANGED

@@ -129,7 +129,7 @@ class ObjectCounter(BaseSolution):
                str.capitalize(key): f"{'IN ' + str(value['IN']) if self.show_in else ''} "
                f"{'OUT ' + str(value['OUT']) if self.show_out else ''}".strip()
                for key, value in self.classwise_count.items()
-                if value["IN"] != 0 or (value["OUT"] != 0 and
+                if (value["IN"] != 0 and self.show_in) or (value["OUT"] != 0 and self.show_out)
            }
            if labels_dict:
                self.annotator.display_analytics(plot_im, labels_dict, (104, 31, 17), (255, 255, 255), self.margin)
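A standalone illustration of the corrected filter: a class is now listed only when a displayed direction actually has counts, so disabling `show_in` no longer surfaces IN-only classes. The counts below are made up.

    # Made-up classwise counts, evaluated with the corrected comprehension condition.
    classwise_count = {"car": {"IN": 4, "OUT": 0}, "person": {"IN": 0, "OUT": 2}}
    show_in, show_out = False, True

    labels_dict = {
        str.capitalize(key): f"{'IN ' + str(value['IN']) if show_in else ''} "
        f"{'OUT ' + str(value['OUT']) if show_out else ''}".strip()
        for key, value in classwise_count.items()
        if (value["IN"] != 0 and show_in) or (value["OUT"] != 0 and show_out)
    }
    print(labels_dict)  # {'Person': 'OUT 2'} -- 'car' is dropped since only its IN count is nonzero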
ultralytics/utils/benchmarks.py
CHANGED
@@ -460,7 +460,7 @@ class ProfileModels:
            if file.suffix in {".pt", ".yaml", ".yml"}:
                model = YOLO(str(file))
                model.fuse()  # to report correct params and GFLOPs in model.info()
-                model_info = model.info()
+                model_info = model.info(imgsz=self.imgsz)
                if self.trt and self.device.type != "cpu" and not engine_file.is_file():
                    engine_file = model.export(
                        format="engine",
ultralytics/utils/callbacks/platform.py
CHANGED

@@ -11,9 +11,6 @@ from time import time
from ultralytics.utils import ENVIRONMENT, GIT, LOGGER, PYTHON_VERSION, RANK, SETTINGS, TESTS_RUNNING, colorstr

PREFIX = colorstr("Platform: ")
-_last_upload = 0  # Rate limit model uploads
-_console_logger = None  # Global console logger instance
-_system_logger = None  # Cached system logger instance

try:
    assert not TESTS_RUNNING  # do not log pytest

@@ -32,6 +29,83 @@ except (AssertionError, ImportError):
    _api_key = None


+def resolve_platform_uri(uri, hard=True):
+    """Resolve ul:// URIs to signed URLs by authenticating with Ultralytics Platform.
+
+    Formats:
+        ul://username/datasets/slug -> Returns signed URL to NDJSON file
+        ul://username/project/model -> Returns signed URL to .pt file
+
+    Args:
+        uri (str): Platform URI starting with "ul://".
+        hard (bool): Whether to raise an error if resolution fails (FileNotFoundError only).
+
+    Returns:
+        (str | None): Signed URL on success, None if not found and hard=False.
+
+    Raises:
+        ValueError: If API key is missing/invalid or URI format is wrong.
+        PermissionError: If access is denied.
+        RuntimeError: If resource is not ready (e.g., dataset still processing).
+        FileNotFoundError: If resource not found and hard=True.
+        ConnectionError: If network request fails and hard=True.
+    """
+    import requests
+
+    path = uri[5:]  # Remove "ul://"
+    parts = path.split("/")
+
+    api_key = os.getenv("ULTRALYTICS_API_KEY") or SETTINGS.get("api_key")
+    if not api_key:
+        raise ValueError(f"ULTRALYTICS_API_KEY required for '{uri}'. Get key at https://alpha.ultralytics.com/settings")
+
+    base = "https://alpha.ultralytics.com/api/webhooks"
+    headers = {"Authorization": f"Bearer {api_key}"}
+
+    # ul://username/datasets/slug
+    if len(parts) == 3 and parts[1] == "datasets":
+        username, _, slug = parts
+        url = f"{base}/datasets/{username}/{slug}/export"
+
+    # ul://username/project/model
+    elif len(parts) == 3:
+        username, project, model = parts
+        url = f"{base}/models/{username}/{project}/{model}/download"
+
+    else:
+        raise ValueError(f"Invalid platform URI: {uri}. Use ul://user/datasets/name or ul://user/project/model")
+
+    try:
+        r = requests.head(url, headers=headers, allow_redirects=False, timeout=30)
+
+        # Handle redirect responses (301, 302, 303, 307, 308)
+        if 300 <= r.status_code < 400 and "location" in r.headers:
+            return r.headers["location"]  # Return signed URL
+
+        # Handle error responses
+        if r.status_code == 401:
+            raise ValueError(f"Invalid ULTRALYTICS_API_KEY for '{uri}'")
+        if r.status_code == 403:
+            raise PermissionError(f"Access denied for '{uri}'. Check dataset/model visibility settings.")
+        if r.status_code == 404:
+            if hard:
+                raise FileNotFoundError(f"Not found on platform: {uri}")
+            LOGGER.warning(f"Not found on platform: {uri}")
+            return None
+        if r.status_code == 409:
+            raise RuntimeError(f"Resource not ready: {uri}. Dataset may still be processing.")
+
+        # Unexpected response
+        r.raise_for_status()
+        raise RuntimeError(f"Unexpected response from platform for '{uri}': {r.status_code}")
+
+    except requests.exceptions.RequestException as e:
+        if hard:
+            raise ConnectionError(f"Failed to resolve {uri}: {e}") from e
+        LOGGER.warning(f"Failed to resolve {uri}: {e}")
+        return None
+
+
def _interp_plot(plot, n=101):
    """Interpolate plot curve data from 1000 to n points to reduce storage size."""
    import numpy as np

@@ -60,22 +134,28 @@ def _interp_plot(plot, n=101):
    return result


-def _send(event, data, project, name):
-    """Send event to Platform endpoint."""
+def _send(event, data, project, name, model_id=None):
+    """Send event to Platform endpoint. Returns response JSON on success."""
    try:
-
+        payload = {"event": event, "project": project, "name": name, "data": data}
+        if model_id:
+            payload["modelId"] = model_id
+        r = requests.post(
            "https://alpha.ultralytics.com/api/webhooks/training/metrics",
-            json=
+            json=payload,
            headers={"Authorization": f"Bearer {_api_key}"},
            timeout=10,
-        )
+        )
+        r.raise_for_status()
+        return r.json()
    except Exception as e:
        LOGGER.debug(f"Platform: Failed to send {event}: {e}")
+        return None


-def _send_async(event, data, project, name):
+def _send_async(event, data, project, name, model_id=None):
    """Send event asynchronously using bounded thread pool."""
-    _executor.submit(_send, event, data, project, name)
+    _executor.submit(_send, event, data, project, name, model_id)


def _upload_model(model_path, project, name):

@@ -171,13 +251,12 @@ def _get_environment_info():

def on_pretrain_routine_start(trainer):
    """Initialize Platform logging at training start."""
-    global _console_logger, _last_upload
-
    if RANK not in {-1, 0} or not trainer.args.project:
        return

-    #
-
+    # Per-trainer state to isolate concurrent training runs
+    trainer._platform_model_id = None
+    trainer._platform_last_upload = time()

    project, name = str(trainer.args.project), str(trainer.args.name or "train")
    url = f"https://alpha.ultralytics.com/{project}/{name}"

@@ -186,45 +265,43 @@ def on_pretrain_routine_start(trainer):
    # Create callback to send console output to Platform
    def send_console_output(content, line_count, chunk_id):
        """Send batched console output to Platform webhook."""
-        _send_async(
+        _send_async(
+            "console_output",
+            {"chunkId": chunk_id, "content": content, "lineCount": line_count},
+            project,
+            name,
+            getattr(trainer, "_platform_model_id", None),
+        )

    # Start console capture with batching (5 lines or 5 seconds)
-
-
-
-    # Gather model info for richer metadata
-    model_info = {}
-    try:
-        info = model_info_for_loggers(trainer)
-        model_info = {
-            "parameters": info.get("model/parameters", 0),
-            "gflops": info.get("model/GFLOPs", 0),
-            "classes": getattr(trainer.model, "yaml", {}).get("nc", 0),  # number of classes
-        }
-    except Exception:
-        pass
+    trainer._platform_console_logger = ConsoleLogger(batch_size=5, flush_interval=5.0, on_flush=send_console_output)
+    trainer._platform_console_logger.start_capture()

    # Collect environment info (W&B-style metadata)
    environment = _get_environment_info()

-
+    # Build trainArgs - callback runs before get_dataset() so args.data is still original (e.g., ul:// URIs)
+    # Note: model_info is sent later in on_fit_epoch_end (epoch 0) when the model is actually loaded
+    train_args = {k: str(v) for k, v in vars(trainer.args).items()}
+
+    # Send synchronously to get modelId for subsequent webhooks
+    response = _send(
        "training_started",
        {
-            "trainArgs":
+            "trainArgs": train_args,
            "epochs": trainer.epochs,
            "device": str(trainer.device),
-            "modelInfo": model_info,
            "environment": environment,
        },
        project,
        name,
    )
+    if response and response.get("modelId"):
+        trainer._platform_model_id = response["modelId"]


def on_fit_epoch_end(trainer):
    """Log training and system metrics at epoch end."""
-    global _system_logger
-
    if RANK not in {-1, 0} or not trainer.args.project:
        return

@@ -233,44 +310,55 @@ def on_fit_epoch_end(trainer):

    if trainer.optimizer and trainer.optimizer.param_groups:
        metrics["lr"] = trainer.optimizer.param_groups[0]["lr"]
+
+    # Extract model info at epoch 0 (sent as separate field, not in metrics)
+    model_info = None
    if trainer.epoch == 0:
        try:
-
+            info = model_info_for_loggers(trainer)
+            model_info = {
+                "parameters": info.get("model/parameters", 0),
+                "gflops": info.get("model/GFLOPs", 0),
+                "speedMs": info.get("model/speed_PyTorch(ms)", 0),
+            }
        except Exception:
            pass

-    # Get system metrics (cache SystemLogger for efficiency)
+    # Get system metrics (cache SystemLogger on trainer for efficiency)
    system = {}
    try:
-        if
-
-        system =
+        if not hasattr(trainer, "_platform_system_logger"):
+            trainer._platform_system_logger = SystemLogger()
+        system = trainer._platform_system_logger.get_metrics(rates=True)
    except Exception:
        pass

+    payload = {
+        "epoch": trainer.epoch,
+        "metrics": metrics,
+        "system": system,
+        "fitness": trainer.fitness,
+        "best_fitness": trainer.best_fitness,
+    }
+    if model_info:
+        payload["modelInfo"] = model_info
+
    _send_async(
        "epoch_end",
-
-        "epoch": trainer.epoch,
-        "metrics": metrics,
-        "system": system,
-        "fitness": trainer.fitness,
-        "best_fitness": trainer.best_fitness,
-        },
+        payload,
        project,
        name,
+        getattr(trainer, "_platform_model_id", None),
    )


def on_model_save(trainer):
    """Upload model checkpoint (rate limited to every 15 min)."""
-    global _last_upload
-
    if RANK not in {-1, 0} or not trainer.args.project:
        return

    # Rate limit to every 15 minutes (900 seconds)
-    if time() -
+    if time() - getattr(trainer, "_platform_last_upload", 0) < 900:
        return

    model_path = trainer.best if trainer.best and Path(trainer.best).exists() else trainer.last

@@ -279,22 +367,20 @@ def on_model_save(trainer):

    project, name = str(trainer.args.project), str(trainer.args.name or "train")
    _upload_model_async(model_path, project, name)
-
+    trainer._platform_last_upload = time()


def on_train_end(trainer):
    """Log final results, upload best model, and send validation plot data."""
-    global _console_logger
-
    if RANK not in {-1, 0} or not trainer.args.project:
        return

    project, name = str(trainer.args.project), str(trainer.args.name or "train")

    # Stop console capture
-    if
-
-
+    if hasattr(trainer, "_platform_console_logger") and trainer._platform_console_logger:
+        trainer._platform_console_logger.stop_capture()
+        trainer._platform_console_logger = None

    # Upload best model (blocking to ensure it completes)
    model_path = None

@@ -332,6 +418,7 @@ def on_train_end(trainer):
        },
        project,
        name,
+        getattr(trainer, "_platform_model_id", None),
    )
    url = f"https://alpha.ultralytics.com/{project}/{name}"
    LOGGER.info(f"{PREFIX}View results at {url}")
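A hedged sketch of calling the new resolver directly; the dataset URI is a placeholder, and with `hard=False` a missing resource returns None instead of raising, as documented above.

    import os

    from ultralytics.utils.callbacks.platform import resolve_platform_uri

    os.environ.setdefault("ULTRALYTICS_API_KEY", "<your-api-key>")  # placeholder key

    url = resolve_platform_uri("ul://username/datasets/traffic-signs", hard=False)  # placeholder URI
    print(url or "dataset not found or not accessible")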
ultralytics/utils/checks.py
CHANGED
@@ -592,7 +592,7 @@ def check_file(file, suffix="", download=True, download_dir=".", hard=True):
    """Search/download file (if necessary), check suffix (if provided), and return path.

    Args:
-        file (str): File name or path.
+        file (str): File name or path, URL, platform URI (ul://), or GCS path (gs://).
        suffix (str | tuple): Acceptable suffix or tuple of suffixes to validate against the file.
        download (bool): Whether to download the file if it doesn't exist locally.
        download_dir (str): Directory to download the file to.

@@ -610,7 +610,26 @@ def check_file(file, suffix="", download=True, download_dir=".", hard=True):
        or file.lower().startswith("grpc://")
    ):  # file exists or gRPC Triton images
        return file
-    elif download and file.lower().startswith(
+    elif download and file.lower().startswith("ul://"):  # Ultralytics Platform URI
+        from ultralytics.utils.callbacks.platform import resolve_platform_uri
+
+        url = resolve_platform_uri(file, hard=hard)  # Convert to signed HTTPS URL
+        if url is None:
+            return []  # Not found, soft fail (consistent with file search behavior)
+        # Use URI path for unique directory structure: ul://user/project/model -> user/project/model/filename.pt
+        uri_path = file[5:]  # Remove "ul://"
+        local_file = Path(download_dir) / uri_path / url2file(url)
+        if local_file.exists():
+            LOGGER.info(f"Found {clean_url(url)} locally at {local_file}")
+        else:
+            local_file.parent.mkdir(parents=True, exist_ok=True)
+            downloads.safe_download(url=url, file=local_file, unzip=False)
+        return str(local_file)
+    elif download and file.lower().startswith(
+        ("https://", "http://", "rtsp://", "rtmp://", "tcp://", "gs://")
+    ):  # download
+        if file.startswith("gs://"):
+            file = "https://storage.googleapis.com/" + file[5:]  # convert gs:// to public HTTPS URL
        url = file  # warning: Pathlib turns :// -> :/
        file = Path(download_dir) / url2file(file)  # '%2F' to '/', split https://url.com/file.txt?auth
        if file.exists():

@@ -945,7 +964,7 @@ def is_rockchip():
        with open("/proc/device-tree/compatible") as f:
            dev_str = f.read()
            *_, soc = dev_str.split(",")
-            if soc.replace("\x00", "") in RKNN_CHIPS:
+            if soc.replace("\x00", "").split("-", 1)[0] in RKNN_CHIPS:
                return True
    except OSError:
        return False
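A sketch of the expanded `check_file` behavior: `ul://` URIs are resolved to signed URLs and cached under a per-URI directory, while `gs://` paths are rewritten to public HTTPS URLs before download. The URIs and directories below are placeholders, and both calls need network access (plus an API key for the `ul://` case).

    from ultralytics.utils.checks import check_file

    # Placeholder platform URI -> downloads to weights/username/project/model/<file>.pt
    weights = check_file("ul://username/project/model", download_dir="weights", hard=False)

    # Placeholder GCS path -> fetched via https://storage.googleapis.com/bucket-name/data/coco8.zip
    dataset = check_file("gs://bucket-name/data/coco8.zip")
    print(weights, dataset)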
ultralytics/utils/metrics.py
CHANGED
@@ -315,7 +315,7 @@ class ConfusionMatrix(DataExportMixin):
        matches (dict): Contains the indices of ground truths and predictions categorized into TP, FP and FN.
    """

-    def __init__(self, names: dict[int, str] =
+    def __init__(self, names: dict[int, str] = {}, task: str = "detect", save_matches: bool = False):
        """Initialize a ConfusionMatrix instance.

        Args:
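For reference, a minimal instantiation of the constructor as it now reads; the class names are arbitrary examples.

    from ultralytics.utils.metrics import ConfusionMatrix

    cm = ConfusionMatrix(names={0: "person", 1: "car"}, task="detect", save_matches=False)
    print(type(cm).__name__)  # ConfusionMatrix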
ultralytics/utils/plotting.py
CHANGED
@@ -972,6 +972,9 @@ def plot_tune_results(csv_file: str = "tune_results.csv", exclude_zero_fitness_points
    if exclude_zero_fitness_points:
        mask = fitness > 0  # exclude zero-fitness points
        x, fitness = x[mask], fitness[mask]
+    if len(fitness) == 0:
+        LOGGER.warning("No valid fitness values to plot (all iterations may have failed)")
+        return
    # Iterative sigma rejection on lower bound only
    for _ in range(3):  # max 3 iterations
        mean, std = fitness.mean(), fitness.std()
ultralytics/utils/tuner.py
CHANGED
@@ -35,9 +35,6 @@ def run_ray_tune(
        >>> result_grid = model.tune(data="coco8.yaml", use_ray=True)
    """
    LOGGER.info("💡 Learn about RayTune at https://docs.ultralytics.com/integrations/ray-tune")
-    if train_args is None:
-        train_args = {}
-
    try:
        checks.check_requirements("ray[tune]")
{ultralytics_opencv_headless-8.3.248.dist-info → ultralytics_opencv_headless-8.3.251.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
Metadata-Version: 2.4
Name: ultralytics-opencv-headless
-Version: 8.3.248
+Version: 8.3.251
Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
Maintainer-email: Ultralytics <hello@ultralytics.com>
{ultralytics_opencv_headless-8.3.248.dist-info → ultralytics_opencv_headless-8.3.251.dist-info}/RECORD
CHANGED

@@ -7,11 +7,11 @@ tests/test_exports.py,sha256=5G5EgDmars6d-N7TVnJdDFWId0IJs-yw03DvdQIjrNU,14246
tests/test_integrations.py,sha256=6QgSh9n0J04RdUYz08VeVOnKmf4S5MDEQ0chzS7jo_c,6220
tests/test_python.py,sha256=viMvRajIbDZdm64hRRg9i8qZ1sU9frwB69e56mxwEXk,29266
tests/test_solutions.py,sha256=CIaphpmOXgz9AE9xcm1RWODKrwGfZLCc84IggGXArNM,14122
-ultralytics/__init__.py,sha256=
+ultralytics/__init__.py,sha256=j9-lnGcbIIGF2MHagQOsp9qgxboU8IyawT1JqTp00zI,1302
ultralytics/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
-ultralytics/cfg/__init__.py,sha256=
+ultralytics/cfg/__init__.py,sha256=sJfreQYmFkCaW9eWex-Um1tG-4zRpC2Q7GuJAWBrFpk,40401
ultralytics/cfg/default.yaml,sha256=KKENSHolDSto1HJVGjBvTXvz9ae-XMcYRzKrjU3QfZc,8912
ultralytics/cfg/datasets/Argoverse.yaml,sha256=QGpdh3Hj5dFrvbsaE_8rAVj9BO4XpKTB7uhXaTTnE-o,3364
ultralytics/cfg/datasets/DOTAv1.5.yaml,sha256=KE7VC-ZMDSei1pLPm-pdk_ZAMRU_gLwGgtIQNbwp6dA,1212

@@ -21,6 +21,7 @@ ultralytics/cfg/datasets/HomeObjects-3K.yaml,sha256=xEtSqEad-rtfGuIrERjjhdISggmP
ultralytics/cfg/datasets/ImageNet.yaml,sha256=N9NHhIgnlNIBqZZbzQZAW3aCnz6RSXQABnopaDs5BmE,42529
ultralytics/cfg/datasets/Objects365.yaml,sha256=8Bl-NAm0mlMW8EfMsz39JZo-HCvmp0ejJXaMeoHTpqw,9649
ultralytics/cfg/datasets/SKU-110K.yaml,sha256=xvRkq3SdDOwBA91U85bln7HTXkod5MvFX6pt1PxTjJE,2609
+ultralytics/cfg/datasets/TT100K.yaml,sha256=qrJ6nrZdvrMy5ov9FaHn-pFI8hJn_WLYaB60vhtCOxs,6918
ultralytics/cfg/datasets/VOC.yaml,sha256=XpaegRHjp7xZnenOuA9zgg2lQURSL-o7mLQwzIKKuqM,3803
ultralytics/cfg/datasets/VisDrone.yaml,sha256=PfudojW5av_5q-dC9VsG_xhvuv9cTGEpRp4loXCJ4Ng,3397
ultralytics/cfg/datasets/african-wildlife.yaml,sha256=6UfO_gnwJEDVq05p72IMJfkTIKZlXKNLSeKru-JyTrQ,915

@@ -123,11 +124,11 @@ ultralytics/data/scripts/get_coco128.sh,sha256=qmRQl_hOKrsdHrTrnyQuFIH01oDz3lfaz
ultralytics/data/scripts/get_imagenet.sh,sha256=hr42H16bM47iT27rgS7MpEo-GeOZAYUQXgr0B2cwn48,1705
ultralytics/engine/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
ultralytics/engine/exporter.py,sha256=Ncf5GK5xAqSu0DH-6z5V53qZB7LstDJFTMF5a-7VQfs,72639
-ultralytics/engine/model.py,sha256=
+ultralytics/engine/model.py,sha256=1Ex0Q7XOwWWtTsTMk-7O4wWiA2cYGayKJwB3zDC1XTg,53223
ultralytics/engine/predictor.py,sha256=neYmNDX27Vv3ggk9xqaKlH6XzB2vlFIghU5o7ZC0zFo,22838
ultralytics/engine/results.py,sha256=DomI01voqR_i7v8LhDGb6jWCprWB4H6I436GSO2NMBY,68030
-ultralytics/engine/trainer.py,sha256=
-ultralytics/engine/tuner.py,sha256=
+ultralytics/engine/trainer.py,sha256=riVwjf_4uhrkH5TYjAvRQmIerNT7pxPBM8jWA60oF-A,45851
+ultralytics/engine/tuner.py,sha256=xZGIYwpQVdnzQcdEmLc70eQy7G7swQQEgdDGxoBLmHY,21570
ultralytics/engine/validator.py,sha256=2rqdVt4hB9ruMJq-L7PbaCNFwuERS7ZHdVSg91RM3wk,17761
ultralytics/hub/__init__.py,sha256=Z0K_E00jzQh90b18q3IDChwVmTvyIYp6C00sCV-n2F8,6709
ultralytics/hub/auth.py,sha256=ANzCeZA7lUzTWc_sFHbDuuyBh1jLl2sTpHkoUbIkFYE,6254

@@ -227,7 +228,7 @@ ultralytics/solutions/distance_calculation.py,sha256=RcpRDodEHAJUug9tobtQKt5_byS
ultralytics/solutions/heatmap.py,sha256=DUyV5UFsOwZ8ArN4BtW8Vm3ps8_VZXc6VP0uiKyGDWY,5481
ultralytics/solutions/instance_segmentation.py,sha256=eggk1uWCZ-6cp0YfxCGVUwnKS6xqJua946oxafjAXGk,3778
ultralytics/solutions/object_blurrer.py,sha256=EZrv3oU68kEaahAxlhk9cF5ZKFtoVaW8bDB4Css9xe0,3981
-ultralytics/solutions/object_counter.py,sha256=
+ultralytics/solutions/object_counter.py,sha256=OpMSLlenDK-cLvCgCOoKbqMXIZrngyqP8DP6ZeEnWL8,9355
ultralytics/solutions/object_cropper.py,sha256=WRbrfXAR5aD6PQBqJ-BvcVaiaqta_9YeTlXN2dY274s,3510
ultralytics/solutions/parking_management.py,sha256=FQKeLEiwnTmRcXqsNOlOt9GTFPjkyvnE5pwwKnneJa4,13770
ultralytics/solutions/queue_management.py,sha256=NlVX6PMEaffjoZjfQrVyayaDUdtc0JF8GzTQrZFjpCg,4371

@@ -252,8 +253,8 @@ ultralytics/trackers/utils/matching.py,sha256=x6uZOIx0O9oVmAcfY6tYMTJQE2cDTUlRR6
ultralytics/utils/__init__.py,sha256=JfvODTB4mG_JOhTeCiPtq0iCEgiCh14hJf195rnOhLQ,55145
ultralytics/utils/autobatch.py,sha256=jiE4m_--H9UkXFDm_FqzcZk_hSTCGpS72XdVEKgZwAo,5114
ultralytics/utils/autodevice.py,sha256=rXlPuo-iX-vZ4BabmMGEGh9Uxpau4R7Zlt1KCo9Xfyc,8892
-ultralytics/utils/benchmarks.py,sha256=
-ultralytics/utils/checks.py,sha256=
+ultralytics/utils/benchmarks.py,sha256=KOFm2AZPehrJajbUu6NTdZoVOFjTpLhUUnfL59sC60w,32293
+ultralytics/utils/checks.py,sha256=DheB1ip9ba7ZW_fjPieNdx98vZpwUDbnCKmavAIzJL4,39411
ultralytics/utils/cpu.py,sha256=OksKOlX93AsbSsFuoYvLXRXgpkOibrZSwQyW6lipt4Q,3493
ultralytics/utils/dist.py,sha256=hOuY1-unhQAY-uWiZw3LWw36d1mqJuYK75NdlwB4oKE,4131
ultralytics/utils/downloads.py,sha256=IyiGjjXqOyf1B0qLMk7vE6sSQ8s232OhKS8aj9XbTgs,22883

@@ -264,16 +265,16 @@ ultralytics/utils/git.py,sha256=UdqeIiiEzg1qkerAZrg5YtTYPuJYwrpxW9N_6Pq6s8U,5501
ultralytics/utils/instance.py,sha256=11mhefvTI9ftMqSirXuiViAi0Fxlo6v84qvNxfRNUoE,18862
ultralytics/utils/logger.py,sha256=T5iaNnaqbCvx_FZf1dhVkr5FVxyxb4vO17t4SJfCIhg,19132
ultralytics/utils/loss.py,sha256=t-z7qkvqF8OtuRHrj2wmvClZV2CCumIRi9jnqkc9i_A,39573
-ultralytics/utils/metrics.py,sha256=
+ultralytics/utils/metrics.py,sha256=SpyMGnuRwwmorJqSdUsDQquVpGmgfj1X3PNDiw_ZZWM,69152
ultralytics/utils/nms.py,sha256=zv1rOzMF6WU8Kdk41VzNf1H1EMt_vZHcbDFbg3mnN2o,14248
ultralytics/utils/ops.py,sha256=nWvTLJSBeW_XrxCy5Ytxl7sZJHp2sRqyCv4mm8QwYnw,25797
ultralytics/utils/patches.py,sha256=mD3slAMAhcezzP42_fOWmacNMU6zXB68Br4_EBCyIjs,7117
-ultralytics/utils/plotting.py,sha256=
+ultralytics/utils/plotting.py,sha256=_iXs4gs8tzMSgiKxCriD4un-MJkOsC3lGSy0wn7qZGk,48433
ultralytics/utils/tal.py,sha256=w7oi6fp0NmL6hHh-yvCCX1cBuuB4JuX7w1wiR4_SMZs,20678
ultralytics/utils/torch_utils.py,sha256=zOPUQlorTiEPSkqlSEPyaQhpmzmgOIKF7f3xJb0UjdQ,40268
ultralytics/utils/tqdm.py,sha256=4kL_nczykHu6VxRzRSbvUSJknrCZydoS_ZegZkFXpsg,16197
ultralytics/utils/triton.py,sha256=BQu3CD3OlT76d1OtmnX5slQU37VC1kzRvEtfI2saIQA,5211
-ultralytics/utils/tuner.py,sha256=
+ultralytics/utils/tuner.py,sha256=1PM7G89X95Yfmhskk8LBXU8T-Bfiln1Ajbnz2lkgvAI,7303
ultralytics/utils/callbacks/__init__.py,sha256=hzL63Rce6VkZhP4Lcim9LKjadixaQG86nKqPhk7IkS0,242
ultralytics/utils/callbacks/base.py,sha256=floD31JHqHpiVabQiE76_hzC_j7KjtL4w_czkD1bLKc,6883
ultralytics/utils/callbacks/clearml.py,sha256=LjfNe4mswceCOpEGVLxqGXjkl_XGbef4awdcp4502RU,5831

@@ -282,7 +283,7 @@ ultralytics/utils/callbacks/dvc.py,sha256=YT0Sa5P8Huj8Fn9jM2P6MYzUY3PIVxsa5BInVi
ultralytics/utils/callbacks/hub.py,sha256=fVLqqr3ZM6hoYFlVMEeejfq1MWDrkWCskPFOG3HGILQ,4159
ultralytics/utils/callbacks/mlflow.py,sha256=wCXjQgdufp9LYujqMzLZOmIOur6kvrApHNeo9dA7t_g,5323
ultralytics/utils/callbacks/neptune.py,sha256=_vt3cMwDHCR-LyT3KtRikGpj6AG11oQ-skUUUUdZ74o,4391
-ultralytics/utils/callbacks/platform.py,sha256=
+ultralytics/utils/callbacks/platform.py,sha256=BMsab6x1ARBKQkS8BQoPlSqchQqQpM3onsakrhP03Ws,15541
ultralytics/utils/callbacks/raytune.py,sha256=Y0dFyNZVRuFovSh7nkgUIHTQL3xIXOACElgHuYbg_5I,1278
ultralytics/utils/callbacks/tensorboard.py,sha256=PTJYvD2gqRUN8xw5VoTjvKnu2adukLfvhMlDgTnTiFU,4952
ultralytics/utils/callbacks/wb.py,sha256=ghmL3gigOa-z_F54-TzMraKw9MAaYX-Wk4H8dLoRvX8,7705

@@ -290,9 +291,9 @@ ultralytics/utils/export/__init__.py,sha256=Cfh-PwVfTF_lwPp-Ss4wiX4z8Sm1XRPklsqd
ultralytics/utils/export/engine.py,sha256=23-lC6dNsmz5vprSJzaN7UGNXrFlVedNcqhlOH_IXes,9956
ultralytics/utils/export/imx.py,sha256=2_mcNzqRIk5LB92JofqNYLN0kkQke1UgKT2jWmEy_l4,13300
ultralytics/utils/export/tensorflow.py,sha256=igYzwbdblb9YgfV4Jgl5lMvynuVRcF51dAzI7j-BBI0,9966
-ultralytics_opencv_headless-8.3.
-ultralytics_opencv_headless-8.3.
-ultralytics_opencv_headless-8.3.
-ultralytics_opencv_headless-8.3.
-ultralytics_opencv_headless-8.3.
-ultralytics_opencv_headless-8.3.
+ultralytics_opencv_headless-8.3.251.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ultralytics_opencv_headless-8.3.251.dist-info/METADATA,sha256=XrZj8u9a5BhVf-u21PYrPhgUOhjrhXhp3HwFNnRMSY4,37728
+ultralytics_opencv_headless-8.3.251.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ultralytics_opencv_headless-8.3.251.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ultralytics_opencv_headless-8.3.251.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ultralytics_opencv_headless-8.3.251.dist-info/RECORD,,
The remaining dist-info files (WHEEL, entry_points.txt, licenses/LICENSE, top_level.txt) are without changes.