dgenerate-ultralytics-headless 8.3.135__py3-none-any.whl → 8.3.138__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dgenerate_ultralytics_headless-8.3.135.dist-info → dgenerate_ultralytics_headless-8.3.138.dist-info}/METADATA +1 -2
- {dgenerate_ultralytics_headless-8.3.135.dist-info → dgenerate_ultralytics_headless-8.3.138.dist-info}/RECORD +40 -40
- tests/test_cuda.py +2 -7
- tests/test_exports.py +1 -6
- tests/test_solutions.py +181 -8
- ultralytics/__init__.py +1 -1
- ultralytics/cfg/__init__.py +4 -4
- ultralytics/data/base.py +1 -1
- ultralytics/data/build.py +4 -3
- ultralytics/data/loaders.py +2 -2
- ultralytics/engine/exporter.py +6 -7
- ultralytics/engine/model.py +2 -2
- ultralytics/engine/predictor.py +3 -10
- ultralytics/engine/trainer.py +1 -1
- ultralytics/engine/validator.py +1 -1
- ultralytics/hub/auth.py +2 -2
- ultralytics/hub/utils.py +8 -3
- ultralytics/models/yolo/classify/predict.py +11 -0
- ultralytics/models/yolo/obb/val.py +1 -1
- ultralytics/models/yolo/world/train.py +66 -20
- ultralytics/models/yolo/world/train_world.py +1 -0
- ultralytics/models/yolo/yoloe/train.py +10 -39
- ultralytics/models/yolo/yoloe/val.py +3 -3
- ultralytics/nn/tasks.py +41 -24
- ultralytics/nn/text_model.py +1 -0
- ultralytics/solutions/similarity_search.py +3 -6
- ultralytics/solutions/streamlit_inference.py +1 -1
- ultralytics/utils/__init__.py +1 -1
- ultralytics/utils/callbacks/hub.py +5 -4
- ultralytics/utils/checks.py +16 -13
- ultralytics/utils/downloads.py +7 -5
- ultralytics/utils/export.py +1 -1
- ultralytics/utils/metrics.py +51 -22
- ultralytics/utils/plotting.py +19 -13
- ultralytics/utils/torch_utils.py +3 -0
- ultralytics/utils/triton.py +1 -1
- {dgenerate_ultralytics_headless-8.3.135.dist-info → dgenerate_ultralytics_headless-8.3.138.dist-info}/WHEEL +0 -0
- {dgenerate_ultralytics_headless-8.3.135.dist-info → dgenerate_ultralytics_headless-8.3.138.dist-info}/entry_points.txt +0 -0
- {dgenerate_ultralytics_headless-8.3.135.dist-info → dgenerate_ultralytics_headless-8.3.138.dist-info}/licenses/LICENSE +0 -0
- {dgenerate_ultralytics_headless-8.3.135.dist-info → dgenerate_ultralytics_headless-8.3.138.dist-info}/top_level.txt +0 -0
ultralytics/utils/checks.py
CHANGED
@@ -73,13 +73,14 @@ def parse_requirements(file_path=ROOT.parent / "requirements.txt", package=""):
     for line in requires:
         line = line.strip()
         if line and not line.startswith("#"):
-            line = line.
+            line = line.partition("#")[0].strip()  # ignore inline comments
            if match := re.match(r"([a-zA-Z0-9-_]+)\s*([<>!=~]+.*)?", line):
                requirements.append(SimpleNamespace(name=match[1], specifier=match[2].strip() if match[2] else ""))

    return requirements


+@functools.lru_cache
 def parse_version(version="0.0.0") -> tuple:
     """
     Convert a version string to a tuple of integers, ignoring any extra non-numeric string attached to the version.
@@ -164,6 +165,7 @@ def check_imgsz(imgsz, stride=32, min_dim=1, max_dim=2, floor=0):
    return sz


+@functools.lru_cache
 def check_version(
     current: str = "0.0.0",
     required: str = "0.0.0",
@@ -377,7 +379,7 @@ def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=()

    pkgs = []
    for r in requirements:
-        r_stripped = r.
+        r_stripped = r.rpartition("/")[-1].replace(".git", "")  # replace git+https://org/repo.git -> 'repo'
        match = re.match(r"([a-zA-Z0-9-_]+)([<>!=~]+.*)?", r_stripped)
        name, required = match[1], match[2].strip() if match[2] else ""
        try:
@@ -421,6 +423,7 @@ def check_torchvision():
    to the compatibility table based on: https://github.com/pytorch/vision#installation.
    """
    compatibility_table = {
+        "2.7": ["0.22"],
        "2.6": ["0.21"],
        "2.5": ["0.20"],
        "2.4": ["0.19"],
@@ -433,10 +436,10 @@ def check_torchvision():
    }

    # Check major and minor versions
-    v_torch = ".".join(torch.__version__.split("+")[0].split(".")[:2])
+    v_torch = ".".join(torch.__version__.split("+", 1)[0].split(".")[:2])
    if v_torch in compatibility_table:
        compatible_versions = compatibility_table[v_torch]
-        v_torchvision = ".".join(TORCHVISION_VERSION.split("+")[0].split(".")[:2])
+        v_torchvision = ".".join(TORCHVISION_VERSION.split("+", 1)[0].split(".")[:2])
        if all(v_torchvision != v for v in compatible_versions):
            LOGGER.warning(
                f"torchvision=={v_torchvision} is incompatible with torch=={v_torch}.\n"
@@ -459,9 +462,8 @@ def check_suffix(file="yolo11n.pt", suffix=".pt", msg=""):
    if isinstance(suffix, str):
        suffix = {suffix}
    for f in file if isinstance(file, (list, tuple)) else [file]:
-        s
-
-        assert s in suffix, f"{msg}{f} acceptable suffix is {suffix}, not {s}"
+        if s := str(f).rpartition(".")[-1].lower().strip():  # file suffix
+            assert f".{s}" in suffix, f"{msg}{f} acceptable suffix is {suffix}, not .{s}"


 def check_yolov5u_filename(file: str, verbose: bool = True):
@@ -502,10 +504,10 @@ def check_model_file_from_stem(model="yolo11n"):
    Returns:
        (str | Path): Model filename with appropriate suffix.
    """
-
-
-
-
+    path = Path(model)
+    if not path.suffix and path.stem in downloads.GITHUB_ASSETS_STEMS:
+        return path.with_suffix(".pt")  # add suffix, i.e. yolo11n -> yolo11n.pt
+    return model


 def check_file(file, suffix="", download=True, download_dir=".", hard=True):
@@ -580,6 +582,7 @@ def check_is_path_safe(basedir, path):
    return path_resolved.exists() and path_resolved.parts[: len(base_dir_resolved.parts)] == base_dir_resolved.parts


+@functools.lru_cache
 def check_imshow(warn=False):
     """
     Check if environment supports image displays.
@@ -652,7 +655,7 @@ def collect_system_info():
    from ultralytics.utils.torch_utils import get_cpu_info, get_gpu_info

    gib = 1 << 30  # bytes per GiB
-    cuda = torch
+    cuda = torch.cuda.is_available()
    check_yolo()
    total, used, free = shutil.disk_usage("/")

@@ -834,7 +837,7 @@ def cuda_device_count() -> int:
        )

        # Take the first line and strip any leading/trailing white space
-        first_line = output.strip().split("\n")[0]
+        first_line = output.strip().split("\n", 1)[0]

        return int(first_line)
    except (subprocess.CalledProcessError, FileNotFoundError, ValueError):
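The recurring pattern in this file is replacing open-ended str.split calls with str.partition/str.rpartition (or a bounded maxsplit) so only the first separator matters, and memoizing pure helpers such as parse_version, check_version, and check_imshow with functools.lru_cache. Below is a minimal standalone sketch of both ideas; the helper names are illustrative and not part of the ultralytics API.

import functools
import re


def strip_inline_comment(line: str) -> str:
    """Keep only the text before the first '#', as in line.partition('#')[0].strip()."""
    return line.partition("#")[0].strip()


def repo_from_git_url(requirement: str) -> str:
    """Reduce 'git+https://org/repo.git' to 'repo', as in r.rpartition('/')[-1].replace('.git', '')."""
    return requirement.rpartition("/")[-1].replace(".git", "")


@functools.lru_cache  # repeated calls with identical arguments return the cached tuple
def parse_version_tuple(version: str = "0.0.0") -> tuple:
    """Crude stand-in for a version parse: keep the leading numeric fields only."""
    return tuple(int(x) for x in re.findall(r"\d+", version)[:3])


print(strip_inline_comment("numpy>=1.23.0  # example inline comment"))  # -> 'numpy>=1.23.0'
print(repo_from_git_url("git+https://github.com/org/repo.git"))         # -> 'repo'
print(parse_version_tuple("2.7.0+cu126"))                                # -> (2, 7, 0)

The lru_cache decorator only pays off because these checks are pure functions of their string arguments; cached results persist for the life of the process.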
ultralytics/utils/downloads.py
CHANGED
@@ -32,11 +32,13 @@ GITHUB_ASSETS_NAMES = frozenset(
    + [f"sam2.1_{k}.pt" for k in "blst"]
    + [f"FastSAM-{k}.pt" for k in "sx"]
    + [f"rtdetr-{k}.pt" for k in "lx"]
-    + [
-
-
+    + [
+        "mobile_sam.pt",
+        "mobileclip_blt.ts",
+        "calibration_image_sample_data_20x128x128x3_float32.npy.zip",
+    ]
 )
-GITHUB_ASSETS_STEMS = frozenset(k.
+GITHUB_ASSETS_STEMS = frozenset(k.rpartition(".")[0] for k in GITHUB_ASSETS_NAMES)


 def is_url(url, check=False):
@@ -247,7 +249,7 @@ def get_google_drive_file_info(link):
    """
    import requests  # slow import

-    file_id = link.split("/d/")[1].split("/view")[0]
+    file_id = link.split("/d/")[1].split("/view", 1)[0]
    drive_url = f"https://drive.google.com/uc?export=download&id={file_id}"
    filename = None

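A quick sketch of what the new stem and Google Drive ID extraction do; the asset names come from the list above, while the Drive link and its file ID are placeholder values.

assets = ["yolo11n.pt", "mobileclip_blt.ts", "calibration_image_sample_data_20x128x128x3_float32.npy.zip"]
stems = frozenset(k.rpartition(".")[0] for k in assets)  # drop only the last extension
print(sorted(stems))
# -> ['calibration_image_sample_data_20x128x128x3_float32.npy', 'mobileclip_blt', 'yolo11n']

link = "https://drive.google.com/file/d/1AbCdEfGh/view?usp=sharing"  # placeholder file id
file_id = link.split("/d/")[1].split("/view", 1)[0]  # maxsplit=1: stop at the first '/view'
print(file_id)  # -> '1AbCdEfGh'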
ultralytics/utils/export.py
CHANGED
@@ -97,7 +97,7 @@ def export_engine(
    builder = trt.Builder(logger)
    config = builder.create_builder_config()
    workspace = int((workspace or 0) * (1 << 30))
-    is_trt10 = int(trt.__version__.split(".")[0]) >= 10  # is TensorRT >= 10
+    is_trt10 = int(trt.__version__.split(".", 1)[0]) >= 10  # is TensorRT >= 10
    if is_trt10 and workspace > 0:
        config.set_memory_pool_limit(trt.MemoryPoolType.WORKSPACE, workspace)
    elif workspace > 0:  # TensorRT versions 7, 8
ultralytics/utils/metrics.py
CHANGED
@@ -409,7 +409,7 @@ class ConfusionMatrix:
    @plt_settings()
    def plot(self, normalize=True, save_dir="", names=(), on_plot=None):
        """
-        Plot the confusion matrix using
+        Plot the confusion matrix using matplotlib and save it to a file.

        Args:
            normalize (bool): Whether to normalize the confusion matrix.
@@ -418,34 +418,63 @@ class ConfusionMatrix:
            on_plot (func): An optional callback to pass plots path and data when they are rendered.
        """
        import matplotlib.pyplot as plt  # scope for faster 'import ultralytics'
-        import seaborn

        array = self.matrix / ((self.matrix.sum(0).reshape(1, -1) + 1e-9) if normalize else 1)  # normalize columns
        array[array < 0.005] = np.nan  # don't annotate (would appear as 0.00)

-
-
-
-
-
+        names = list(names)
+        fig, ax = plt.subplots(1, 1, figsize=(12, 9))
+        if self.nc >= 100:  # downsample for large class count
+            k = max(2, self.nc // 60)  # step size for downsampling, always > 1
+            keep_idx = slice(None, None, k)  # create slice instead of array
+            names = names[keep_idx]  # slice class names
+            array = array[keep_idx, :][:, keep_idx]  # slice matrix rows and cols
+            n = (self.nc + k - 1) // k  # number of retained classes
+            nc = nn = n if self.task == "classify" else n + 1  # adjust for background if needed
+        else:
+            nc = nn = self.nc if self.task == "classify" else self.nc + 1
+        ticklabels = (names + ["background"]) if (0 < nn < 99) and (nn == nc) else "auto"
+        xy_ticks = np.arange(len(ticklabels))
+        tick_fontsize = max(6, 15 - 0.1 * nc)  # Minimum size is 6
+        label_fontsize = max(6, 12 - 0.1 * nc)
+        title_fontsize = max(6, 12 - 0.1 * nc)
+        btm = max(0.1, 0.25 - 0.001 * nc)  # Minimum value is 0.1
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")  # suppress empty matrix RuntimeWarning: All-NaN slice encountered
-
-
-
-
-
-
-
-
-
-
-
-
+            im = ax.imshow(array, cmap="Blues", vmin=0.0, interpolation="none")
+            ax.xaxis.set_label_position("bottom")
+            if nc < 30:  # Add score for each cell of confusion matrix
+                for i, row in enumerate(array[:nc]):
+                    for j, val in enumerate(row[:nc]):
+                        val = array[i, j]
+                        if np.isnan(val):
+                            continue
+                        ax.text(
+                            j,
+                            i,
+                            f"{val:.2f}" if normalize else f"{int(val)}",
+                            ha="center",
+                            va="center",
+                            fontsize=10,
+                            color="white" if val > (0.7 if normalize else 2) else "black",
+                        )
+            cbar = fig.colorbar(im, ax=ax, fraction=0.046, pad=0.05)
        title = "Confusion Matrix" + " Normalized" * normalize
-        ax.set_xlabel("True")
-        ax.set_ylabel("Predicted")
-        ax.set_title(title)
+        ax.set_xlabel("True", fontsize=label_fontsize, labelpad=10)
+        ax.set_ylabel("Predicted", fontsize=label_fontsize, labelpad=10)
+        ax.set_title(title, fontsize=title_fontsize, pad=20)
+        ax.set_xticks(xy_ticks)
+        ax.set_yticks(xy_ticks)
+        ax.tick_params(axis="x", bottom=True, top=False, labelbottom=True, labeltop=False)
+        ax.tick_params(axis="y", left=True, right=False, labelleft=True, labelright=False)
+        if ticklabels != "auto":
+            ax.set_xticklabels(ticklabels, fontsize=tick_fontsize, rotation=90, ha="center")
+            ax.set_yticklabels(ticklabels, fontsize=tick_fontsize)
+        for s in ["left", "right", "bottom", "top", "outline"]:
+            if s != "outline":
+                ax.spines[s].set_visible(False)  # Confusion matrix plot don't have outline
+            cbar.ax.spines[s].set_visible(False)
+        fig.subplots_adjust(left=0, right=0.84, top=0.94, bottom=btm)  # Adjust layout to ensure equal margins
        plot_fname = Path(save_dir) / f"{title.lower().replace(' ', '_')}.png"
        fig.savefig(plot_fname, dpi=250)
        plt.close(fig)
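The seaborn heatmap is replaced by a plain matplotlib imshow, and matrices with 100 or more classes are now downsampled before plotting. A small sketch of the downsampling arithmetic with an illustrative class count, assuming a detect-style matrix with one extra background row and column:

import numpy as np

nc = 150                         # illustrative number of classes
k = max(2, nc // 60)             # step size for downsampling -> 2
keep_idx = slice(None, None, k)  # a slice, so no index array is allocated
n = (nc + k - 1) // k            # ceil(nc / k) retained classes -> 75

matrix = np.random.rand(nc + 1, nc + 1)   # +1 for the background row/column
small = matrix[keep_idx, :][:, keep_idx]  # slice rows, then columns
print(k, n, small.shape)                  # -> 2 75 (76, 76)

The tick, label, and title font sizes and the bottom margin are then scaled from the reduced class count, which keeps the figure legible for models with hundreds of classes.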
ultralytics/utils/plotting.py
CHANGED
@@ -537,9 +537,9 @@ def plot_labels(boxes, cls, names=(), save_dir=Path(""), on_plot=None):
    """
    import matplotlib.pyplot as plt  # scope for faster 'import ultralytics'
    import pandas
-    import
+    from matplotlib.colors import LinearSegmentedColormap

-    # Filter matplotlib>=3.7.2 warning
+    # Filter matplotlib>=3.7.2 warning
    warnings.filterwarnings("ignore", category=UserWarning, message="The figure layout has changed to tight")
    warnings.filterwarnings("ignore", category=FutureWarning)

@@ -549,12 +549,17 @@ def plot_labels(boxes, cls, names=(), save_dir=Path(""), on_plot=None):
    boxes = boxes[:1000000]  # limit to 1M boxes
    x = pandas.DataFrame(boxes, columns=["x", "y", "width", "height"])

-    # Seaborn correlogram
-
-
-
+    try:  # Seaborn correlogram
+        import seaborn
+
+        seaborn.pairplot(x, corner=True, diag_kind="auto", kind="hist", diag_kws=dict(bins=50), plot_kws=dict(pmax=0.9))
+        plt.savefig(save_dir / "labels_correlogram.jpg", dpi=200)
+        plt.close()
+    except ImportError:
+        pass  # Skip if seaborn is not installed

    # Matplotlib labels
+    subplot_3_4_color = LinearSegmentedColormap.from_list("white_blue", ["white", "blue"])
    ax = plt.subplots(2, 2, figsize=(8, 8), tight_layout=True)[1].ravel()
    y = ax[0].hist(cls, bins=np.linspace(0, nc, nc + 1) - 0.5, rwidth=0.8)
    for i in range(nc):
@@ -565,18 +570,19 @@ def plot_labels(boxes, cls, names=(), save_dir=Path(""), on_plot=None):
        ax[0].set_xticklabels(list(names.values()), rotation=90, fontsize=10)
    else:
        ax[0].set_xlabel("classes")
-
-    seaborn.histplot(x, x="width", y="height", ax=ax[3], bins=50, pmax=0.9)
-
-    # Rectangles
-    boxes[:, 0:2] = 0.5  # center
-    boxes = ops.xywh2xyxy(boxes) * 1000
+    boxes = np.column_stack([0.5 - boxes[:, 2:4] / 2, 0.5 + boxes[:, 2:4] / 2]) * 1000
    img = Image.fromarray(np.ones((1000, 1000, 3), dtype=np.uint8) * 255)
    for cls, box in zip(cls[:500], boxes[:500]):
        ImageDraw.Draw(img).rectangle(box, width=1, outline=colors(cls))  # plot
    ax[1].imshow(img)
    ax[1].axis("off")

+    ax[2].hist2d(x["x"], x["y"], bins=50, cmap=subplot_3_4_color)
+    ax[2].set_xlabel("x")
+    ax[2].set_ylabel("y")
+    ax[3].hist2d(x["width"], x["height"], bins=50, cmap=subplot_3_4_color)
+    ax[3].set_xlabel("width")
+    ax[3].set_ylabel("height")
    for a in [0, 1, 2, 3]:
        for s in ["top", "right", "left", "bottom"]:
            ax[a].spines[s].set_visible(False)
@@ -994,7 +1000,7 @@ def feature_visualization(x, module_type, stage, n=32, save_dir=Path("runs/detec
    if isinstance(x, torch.Tensor):
        _, channels, height, width = x.shape  # batch, channels, height, width
        if height > 1 and width > 1:
-            f = save_dir / f"stage{stage}_{module_type.
+            f = save_dir / f"stage{stage}_{module_type.rsplit('.', 1)[-1]}_features.png"  # filename

            blocks = torch.chunk(x[0].cpu(), channels, dim=0)  # select batch index 0, block by channels
            n = min(n, channels)  # number of plots
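In plot_labels, seaborn becomes optional (its pairplot is wrapped in try/except ImportError) and the two 2D histograms move to matplotlib's hist2d with a white-to-blue LinearSegmentedColormap. The rectangle panel also builds xyxy corners directly with NumPy instead of ops.xywh2xyxy; a tiny sketch of that step with illustrative boxes:

import numpy as np

# Illustrative normalized [x, y, w, h] boxes; centers are forced to 0.5, so only w/h matter.
boxes = np.array([[0.2, 0.3, 0.5, 0.25], [0.7, 0.1, 0.25, 0.5]])

# New one-liner: centered xyxy corners scaled to a 1000 x 1000 canvas.
xyxy = np.column_stack([0.5 - boxes[:, 2:4] / 2, 0.5 + boxes[:, 2:4] / 2]) * 1000
print(xyxy)
# -> [[250. 375. 750. 625.]
#     [375. 250. 625. 750.]]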
ultralytics/utils/torch_utils.py
CHANGED
@@ -1,5 +1,6 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

+import functools
 import gc
 import math
 import os
@@ -101,6 +102,7 @@ def autocast(enabled: bool, device: str = "cuda"):
        return torch.cuda.amp.autocast(enabled)


+@functools.lru_cache
 def get_cpu_info():
     """Return a string with system CPU information, i.e. 'Apple M2'."""
     from ultralytics.utils import PERSISTENT_CACHE  # avoid circular import error
@@ -118,6 +120,7 @@ def get_cpu_info():
    return PERSISTENT_CACHE.get("cpu_info", "unknown")


+@functools.lru_cache
 def get_gpu_info(index):
     """Return a string with system GPU information, i.e. 'Tesla T4, 15102MiB'."""
     properties = torch.cuda.get_device_properties(index)
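get_cpu_info and get_gpu_info are now memoized with functools.lru_cache, so repeated device queries within a process are served from the cache. A generic sketch of the behavior; the slow helper below is a stand-in, not the library's function:

import functools
import time


@functools.lru_cache  # results are keyed by the hashable arguments
def slow_device_name(index: int) -> str:
    """Stand-in for an expensive device query."""
    time.sleep(0.2)
    return f"device-{index}"


t0 = time.perf_counter()
slow_device_name(0)                   # first call pays the cost
t1 = time.perf_counter()
slow_device_name(0)                   # second call hits the cache
t2 = time.perf_counter()
print(f"first: {t1 - t0:.3f}s, cached: {t2 - t1:.6f}s")
print(slow_device_name.cache_info())  # CacheInfo(hits=1, misses=1, maxsize=128, currsize=1)

Because the cache is keyed on the argument, a per-index lookup like get_gpu_info(index) is cached once per device.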
ultralytics/utils/triton.py
CHANGED
@@ -53,7 +53,7 @@ class TritonRemoteModel:
        """
        if not endpoint and not scheme:  # Parse all args from URL string
            splits = urlsplit(url)
-            endpoint = splits.path.strip("/").split("/")[0]
+            endpoint = splits.path.strip("/").split("/", 1)[0]
            scheme = splits.scheme
            url = splits.netloc

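The endpoint parse in TritonRemoteModel now bounds the split to the first path segment. A sketch with a hypothetical Triton URL:

from urllib.parse import urlsplit

url = "http://localhost:8000/yolo_model"  # hypothetical scheme://host:port/endpoint
splits = urlsplit(url)
endpoint = splits.path.strip("/").split("/", 1)[0]  # only the first path segment is kept as the model name
print(splits.scheme, splits.netloc, endpoint)  # -> http localhost:8000 yolo_model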
Files without changes: WHEEL, entry_points.txt, licenses/LICENSE, top_level.txt