dgenerate-ultralytics-headless 8.3.137-py3-none-any.whl → 8.3.139-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.139.dist-info}/METADATA +1 -1
  2. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.139.dist-info}/RECORD +36 -36
  3. tests/test_python.py +6 -1
  4. tests/test_solutions.py +183 -8
  5. ultralytics/__init__.py +1 -1
  6. ultralytics/cfg/__init__.py +1 -1
  7. ultralytics/data/base.py +1 -1
  8. ultralytics/data/build.py +4 -3
  9. ultralytics/data/loaders.py +2 -2
  10. ultralytics/engine/exporter.py +5 -5
  11. ultralytics/engine/model.py +2 -2
  12. ultralytics/engine/predictor.py +3 -10
  13. ultralytics/engine/results.py +2 -209
  14. ultralytics/engine/trainer.py +1 -1
  15. ultralytics/engine/validator.py +1 -1
  16. ultralytics/hub/auth.py +2 -2
  17. ultralytics/hub/utils.py +8 -3
  18. ultralytics/models/yolo/classify/predict.py +11 -0
  19. ultralytics/models/yolo/obb/val.py +1 -1
  20. ultralytics/models/yolo/world/train.py +1 -1
  21. ultralytics/models/yolo/yoloe/val.py +3 -3
  22. ultralytics/solutions/similarity_search.py +3 -6
  23. ultralytics/solutions/streamlit_inference.py +1 -1
  24. ultralytics/utils/__init__.py +159 -1
  25. ultralytics/utils/callbacks/hub.py +5 -4
  26. ultralytics/utils/checks.py +25 -18
  27. ultralytics/utils/downloads.py +7 -5
  28. ultralytics/utils/export.py +1 -1
  29. ultralytics/utils/metrics.py +90 -5
  30. ultralytics/utils/plotting.py +1 -1
  31. ultralytics/utils/torch_utils.py +3 -0
  32. ultralytics/utils/triton.py +1 -1
  33. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.139.dist-info}/WHEEL +0 -0
  34. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.139.dist-info}/entry_points.txt +0 -0
  35. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.139.dist-info}/licenses/LICENSE +0 -0
  36. {dgenerate_ultralytics_headless-8.3.137.dist-info → dgenerate_ultralytics_headless-8.3.139.dist-info}/top_level.txt +0 -0
ultralytics/utils/checks.py
@@ -73,7 +73,7 @@ def parse_requirements(file_path=ROOT.parent / "requirements.txt", package=""):
     for line in requires:
         line = line.strip()
         if line and not line.startswith("#"):
-            line = line.split("#")[0].strip()  # ignore inline comments
+            line = line.partition("#")[0].strip()  # ignore inline comments
             if match := re.match(r"([a-zA-Z0-9-_]+)\s*([<>!=~]+.*)?", line):
                 requirements.append(SimpleNamespace(name=match[1], specifier=match[2].strip() if match[2] else ""))
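For context, str.partition returns a (head, separator, tail) tuple and stops at the first match, so partition("#")[0] yields the same pre-comment text as split("#")[0] without building a list of every segment. A quick illustrative check (not from the package):

    line = "torch>=1.8.0  # pinned for export  # see notes"
    assert line.split("#")[0] == line.partition("#")[0] == "torch>=1.8.0  "
    print(line.partition("#"))  # ('torch>=1.8.0  ', '#', ' pinned for export  # see notes')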
@@ -379,7 +379,7 @@ def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=()
 
     pkgs = []
     for r in requirements:
-        r_stripped = r.split("/")[-1].replace(".git", "")  # replace git+https://org/repo.git -> 'repo'
+        r_stripped = r.rpartition("/")[-1].replace(".git", "")  # replace git+https://org/repo.git -> 'repo'
         match = re.match(r"([a-zA-Z0-9-_]+)([<>!=~]+.*)?", r_stripped)
         name, required = match[1], match[2].strip() if match[2] else ""
         try:
@@ -388,21 +388,28 @@ def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=()
             pkgs.append(r)
 
     @Retry(times=2, delay=1)
-    def attempt_install(packages, commands):
-        """Attempt pip install command with retries on failure."""
-        return subprocess.check_output(f"pip install --no-cache-dir {packages} {commands}", shell=True).decode()
+    def attempt_install(packages, commands, use_uv):
+        """Attempt package installation with uv if available, falling back to pip."""
+        if use_uv:
+            # Note requires --break-system-packages on ARM64 dockerfile
+            cmd = f"uv pip install --system --no-cache-dir {packages} {commands} --index-strategy=unsafe-best-match --break-system-packages --prerelease=allow"
+        else:
+            cmd = f"pip install --no-cache-dir {packages} {commands}"
+        return subprocess.check_output(cmd, shell=True).decode()
 
     s = " ".join(f'"{x}"' for x in pkgs)  # console string
     if s:
         if install and AUTOINSTALL:  # check environment variable
+            # Note uv fails on arm64 macOS and Raspberry Pi runners
+            uv = not ARM64 and subprocess.run(["command", "-v", "uv"], capture_output=True, shell=True).returncode == 0
             n = len(pkgs)  # number of packages updates
             LOGGER.info(f"{prefix} Ultralytics requirement{'s' * (n > 1)} {pkgs} not found, attempting AutoUpdate...")
             try:
                 t = time.time()
                 assert ONLINE, "AutoUpdate skipped (offline)"
-                LOGGER.info(attempt_install(s, cmds))
+                LOGGER.info(attempt_install(s, cmds, use_uv=uv))
                 dt = time.time() - t
-                LOGGER.info(f"{prefix} AutoUpdate success ✅ {dt:.1f}s, installed {n} package{'s' * (n > 1)}: {pkgs}")
+                LOGGER.info(f"{prefix} AutoUpdate success ✅ {dt:.1f}s")
                 LOGGER.warning(
                     f"{prefix} {colorstr('bold', 'Restart runtime or rerun command for updates to take effect')}\n"
                 )
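The AutoUpdate path now prefers uv over pip when a `uv` binary is found (and the host is not ARM64). A standalone sketch of the same detect-then-fall-back pattern; install_packages and the use of shutil.which are illustrative choices here, not the package's code:

    import shutil
    import subprocess

    def install_packages(packages: str) -> str:
        """Install via uv when present on PATH, else pip (illustrative sketch only)."""
        use_uv = shutil.which("uv") is not None  # portable stand-in for `command -v uv`
        cmd = f"uv pip install --system {packages}" if use_uv else f"pip install {packages}"
        return subprocess.check_output(cmd, shell=True).decode()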
@@ -423,6 +430,7 @@ def check_torchvision():
     to the compatibility table based on: https://github.com/pytorch/vision#installation.
     """
     compatibility_table = {
+        "2.7": ["0.22"],
         "2.6": ["0.21"],
         "2.5": ["0.20"],
         "2.4": ["0.19"],
@@ -435,10 +443,10 @@ def check_torchvision():
     }
 
     # Check major and minor versions
-    v_torch = ".".join(torch.__version__.split("+")[0].split(".")[:2])
+    v_torch = ".".join(torch.__version__.split("+", 1)[0].split(".")[:2])
     if v_torch in compatibility_table:
         compatible_versions = compatibility_table[v_torch]
-        v_torchvision = ".".join(TORCHVISION_VERSION.split("+")[0].split(".")[:2])
+        v_torchvision = ".".join(TORCHVISION_VERSION.split("+", 1)[0].split(".")[:2])
         if all(v_torchvision != v for v in compatible_versions):
             LOGGER.warning(
                 f"torchvision=={v_torchvision} is incompatible with torch=={v_torch}.\n"
@@ -461,9 +469,8 @@ def check_suffix(file="yolo11n.pt", suffix=".pt", msg=""):
     if isinstance(suffix, str):
         suffix = {suffix}
     for f in file if isinstance(file, (list, tuple)) else [file]:
-        s = Path(f).suffix.lower().strip()  # file suffix
-        if len(s):
-            assert s in suffix, f"{msg}{f} acceptable suffix is {suffix}, not {s}"
+        if s := str(f).rpartition(".")[-1].lower().strip():  # file suffix
+            assert f".{s}" in suffix, f"{msg}{f} acceptable suffix is {suffix}, not .{s}"
 
 
 def check_yolov5u_filename(file: str, verbose: bool = True):
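The rewritten check_suffix folds extraction and the emptiness test into one walrus expression. An illustrative trace, with one behavioral nuance worth noting:

    f = "yolo11n.pt"
    if s := str(f).rpartition(".")[-1].lower().strip():  # s == "pt"
        print(f".{s}")  # -> ".pt"
    # Nuance: for a dotless name, rpartition(".") returns ("", "", name), so s is
    # the whole (truthy) name, whereas the old Path(f).suffix was "" and skipped.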
@@ -504,10 +511,10 @@ def check_model_file_from_stem(model="yolo11n"):
     Returns:
         (str | Path): Model filename with appropriate suffix.
     """
-    if model and not Path(model).suffix and Path(model).stem in downloads.GITHUB_ASSETS_STEMS:
-        return Path(model).with_suffix(".pt")  # add suffix, i.e. yolo11n -> yolo11n.pt
-    else:
-        return model
+    path = Path(model)
+    if not path.suffix and path.stem in downloads.GITHUB_ASSETS_STEMS:
+        return path.with_suffix(".pt")  # add suffix, i.e. yolo11n -> yolo11n.pt
+    return model
 
 
 def check_file(file, suffix="", download=True, download_dir=".", hard=True):
@@ -655,7 +662,7 @@ def collect_system_info():
     from ultralytics.utils.torch_utils import get_cpu_info, get_gpu_info
 
     gib = 1 << 30  # bytes per GiB
-    cuda = torch and torch.cuda.is_available()
+    cuda = torch.cuda.is_available()
     check_yolo()
     total, used, free = shutil.disk_usage("/")
@@ -837,7 +844,7 @@ def cuda_device_count() -> int:
         )
 
         # Take the first line and strip any leading/trailing white space
-        first_line = output.strip().split("\n")[0]
+        first_line = output.strip().split("\n", 1)[0]
 
         return int(first_line)
     except (subprocess.CalledProcessError, FileNotFoundError, ValueError):
ultralytics/utils/downloads.py
@@ -32,11 +32,13 @@ GITHUB_ASSETS_NAMES = frozenset(
     + [f"sam2.1_{k}.pt" for k in "blst"]
     + [f"FastSAM-{k}.pt" for k in "sx"]
     + [f"rtdetr-{k}.pt" for k in "lx"]
-    + ["mobile_sam.pt"]
-    + ["mobileclip_blt.ts"]
-    + ["calibration_image_sample_data_20x128x128x3_float32.npy.zip"]
+    + [
+        "mobile_sam.pt",
+        "mobileclip_blt.ts",
+        "calibration_image_sample_data_20x128x128x3_float32.npy.zip",
+    ]
 )
-GITHUB_ASSETS_STEMS = frozenset(k.rsplit(".", 1)[0] for k in GITHUB_ASSETS_NAMES)
+GITHUB_ASSETS_STEMS = frozenset(k.rpartition(".")[0] for k in GITHUB_ASSETS_NAMES)
 
 
 def is_url(url, check=False):
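rsplit(".", 1)[0] and rpartition(".")[0] agree whenever a dot is present, dropping only the final extension, so double extensions keep their inner part:

    name = "calibration_image_sample_data_20x128x128x3_float32.npy.zip"
    assert name.rsplit(".", 1)[0] == name.rpartition(".")[0]
    print(name.rpartition(".")[0])  # -> "...float32.npy"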
@@ -247,7 +249,7 @@ def get_google_drive_file_info(link):
     """
     import requests  # slow import
 
-    file_id = link.split("/d/")[1].split("/view")[0]
+    file_id = link.split("/d/")[1].split("/view", 1)[0]
     drive_url = f"https://drive.google.com/uc?export=download&id={file_id}"
     filename = None
ultralytics/utils/export.py
@@ -97,7 +97,7 @@ def export_engine(
     builder = trt.Builder(logger)
     config = builder.create_builder_config()
     workspace = int((workspace or 0) * (1 << 30))
-    is_trt10 = int(trt.__version__.split(".")[0]) >= 10  # is TensorRT >= 10
+    is_trt10 = int(trt.__version__.split(".", 1)[0]) >= 10  # is TensorRT >= 10
     if is_trt10 and workspace > 0:
         config.set_memory_pool_limit(trt.MemoryPoolType.WORKSPACE, workspace)
     elif workspace > 0:  # TensorRT versions 7, 8
ultralytics/utils/metrics.py
@@ -8,7 +8,7 @@ from pathlib import Path
 import numpy as np
 import torch
 
-from ultralytics.utils import LOGGER, SimpleClass, TryExcept, checks, plt_settings
+from ultralytics.utils import LOGGER, DataExportMixin, SimpleClass, TryExcept, checks, plt_settings
 
 OKS_SIGMA = (
     np.array([0.26, 0.25, 0.25, 0.35, 0.35, 0.79, 0.79, 0.72, 0.72, 0.62, 0.62, 1.07, 1.07, 0.87, 0.87, 0.89, 0.89])
@@ -865,7 +865,7 @@ class Metric(SimpleClass):
         ]
 
 
-class DetMetrics(SimpleClass):
+class DetMetrics(SimpleClass, DataExportMixin):
     """
     Utility class for computing detection metrics such as precision, recall, and mean average precision (mAP).
 
@@ -961,8 +961,29 @@ class DetMetrics(SimpleClass):
         """Return dictionary of computed performance metrics and statistics."""
         return self.box.curves_results
 
+    def summary(self, **kwargs):
+        """Returns per-class detection metrics with shared scalar values included."""
+        scalars = {
+            "box-map": self.box.map,
+            "box-map50": self.box.map50,
+            "box-map75": self.box.map75,
+        }
+        per_class = {
+            "box-p": self.box.p,
+            "box-r": self.box.r,
+            "box-f1": self.box.f1,
+        }
+        return [
+            {
+                "class_name": self.names[i] if hasattr(self, "names") and i in self.names else str(i),
+                **{k: v[i] for k, v in per_class.items()},
+                **scalars,
+            }
+            for i in range(len(next(iter(per_class.values()), [])))
+        ]
 
-class SegmentMetrics(SimpleClass):
+
+class SegmentMetrics(SimpleClass, DataExportMixin):
     """
     Calculates and aggregates detection and segmentation metrics over a given set of classes.
 
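Each new summary() returns a list of per-class row dicts, which is the tabular shape DataExportMixin (added to ultralytics/utils/__init__.py in this release, +159 lines) can serialize. A hedged usage sketch; to_df() is an assumed mixin helper not shown in this diff:

    from ultralytics import YOLO

    metrics = YOLO("yolo11n.pt").val(data="coco8.yaml")  # DetMetrics instance
    rows = metrics.summary()  # [{"class_name": ..., "box-p": ..., "box-map50": ...}, ...]
    print(rows[0]["class_name"], rows[0]["box-map50"])
    # df = metrics.to_df()  # assumed DataExportMixin export helper, if provided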
@@ -1097,6 +1118,29 @@ class SegmentMetrics(SimpleClass):
         """Return dictionary of computed performance metrics and statistics."""
         return self.box.curves_results + self.seg.curves_results
 
+    def summary(self, **kwargs):
+        """Returns per-class segmentation metrics with shared scalar values included (box + mask)."""
+        scalars = {
+            "box-map": self.box.map,
+            "box-map50": self.box.map50,
+            "box-map75": self.box.map75,
+            "mask-map": self.seg.map,
+            "mask-map50": self.seg.map50,
+            "mask-map75": self.seg.map75,
+        }
+        per_class = {
+            "box-p": self.box.p,
+            "box-r": self.box.r,
+            "box-f1": self.box.f1,
+            "mask-p": self.seg.p,
+            "mask-r": self.seg.r,
+            "mask-f1": self.seg.f1,
+        }
+        return [
+            {"class_name": self.names[i], **{k: v[i] for k, v in per_class.items()}, **scalars}
+            for i in range(len(next(iter(per_class.values()), [])))
+        ]
+
 
 class PoseMetrics(SegmentMetrics):
     """
@@ -1229,8 +1273,31 @@ class PoseMetrics(SegmentMetrics):
         """Return dictionary of computed performance metrics and statistics."""
         return self.box.curves_results + self.pose.curves_results
 
+    def summary(self, **kwargs):
+        """Returns per-class pose metrics with shared scalar values included (box + pose)."""
+        scalars = {
+            "box-map": self.box.map,
+            "box-map50": self.box.map50,
+            "box-map75": self.box.map75,
+            "pose-map": self.pose.map,
+            "pose-map50": self.pose.map50,
+            "pose-map75": self.pose.map75,
+        }
+        per_class = {
+            "box-p": self.box.p,
+            "box-r": self.box.r,
+            "box-f1": self.box.f1,
+            "pose-p": self.pose.p,
+            "pose-r": self.pose.r,
+            "pose-f1": self.pose.f1,
+        }
+        return [
+            {"class_name": self.names[i], **{k: v[i] for k, v in per_class.items()}, **scalars}
+            for i in range(len(next(iter(per_class.values()), [])))
+        ]
+
 
-class ClassifyMetrics(SimpleClass):
+class ClassifyMetrics(SimpleClass, DataExportMixin):
     """
     Class for computing classification metrics including top-1 and top-5 accuracy.
 
@@ -1286,8 +1353,12 @@ class ClassifyMetrics(SimpleClass):
         """Return a list of curves for accessing specific metrics curves."""
         return []
 
+    def summary(self, **kwargs):
+        """Returns a single-row summary for classification metrics (top1/top5)."""
+        return [{"classify-top1": self.top1, "classify-top5": self.top5}]
+
 
-class OBBMetrics(SimpleClass):
+class OBBMetrics(SimpleClass, DataExportMixin):
     """
     Metrics for evaluating oriented bounding box (OBB) detection.
 
@@ -1316,6 +1387,7 @@ class OBBMetrics(SimpleClass):
         self.names = names
         self.box = Metric()
         self.speed = {"preprocess": 0.0, "inference": 0.0, "loss": 0.0, "postprocess": 0.0}
+        self.task = "obb"
 
     def process(self, tp, conf, pred_cls, target_cls, on_plot=None):
         """
@@ -1383,3 +1455,16 @@ class OBBMetrics(SimpleClass):
     def curves_results(self):
         """Return a list of curves for accessing specific metrics curves."""
         return []
+
+    def summary(self, **kwargs):
+        """Returns per-class detection metrics with shared scalar values included."""
+        scalars = {
+            "box-map": self.box.map,
+            "box-map50": self.box.map50,
+            "box-map75": self.box.map75,
+        }
+        per_class = {"box-p": self.box.p, "box-r": self.box.r, "box-f1": self.box.f1}
+        return [
+            {"class_name": self.names[i], **{k: v[i] for k, v in per_class.items()}, **scalars}
+            for i in range(len(next(iter(per_class.values()), [])))
+        ]
ultralytics/utils/plotting.py
@@ -1000,7 +1000,7 @@ def feature_visualization(x, module_type, stage, n=32, save_dir=Path("runs/detec
     if isinstance(x, torch.Tensor):
         _, channels, height, width = x.shape  # batch, channels, height, width
         if height > 1 and width > 1:
-            f = save_dir / f"stage{stage}_{module_type.split('.')[-1]}_features.png"  # filename
+            f = save_dir / f"stage{stage}_{module_type.rsplit('.', 1)[-1]}_features.png"  # filename
 
             blocks = torch.chunk(x[0].cpu(), channels, dim=0)  # select batch index 0, block by channels
             n = min(n, channels)  # number of plots
ultralytics/utils/torch_utils.py
@@ -1,5 +1,6 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
 
+import functools
 import gc
 import math
 import os
@@ -101,6 +102,7 @@ def autocast(enabled: bool, device: str = "cuda"):
         return torch.cuda.amp.autocast(enabled)
 
 
+@functools.lru_cache
 def get_cpu_info():
     """Return a string with system CPU information, i.e. 'Apple M2'."""
     from ultralytics.utils import PERSISTENT_CACHE  # avoid circular import error
@@ -118,6 +120,7 @@ def get_cpu_info():
     return PERSISTENT_CACHE.get("cpu_info", "unknown")
 
 
+@functools.lru_cache
 def get_gpu_info(index):
     """Return a string with system GPU information, i.e. 'Tesla T4, 15102MiB'."""
     properties = torch.cuda.get_device_properties(index)
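functools.lru_cache in its bare decorator form (valid since Python 3.8) memoizes these probes, so repeated calls, e.g. once per logged device, skip the expensive lookup; get_gpu_info caches per distinct index argument. A minimal sketch of the caching behavior with a hypothetical probe function:

    import functools

    @functools.lru_cache  # bare decorator form, Python 3.8+
    def probe(index: int) -> str:
        print("querying device...")  # executes once per distinct index
        return f"device-{index}"

    probe(0)  # prints "querying device..."
    probe(0)  # cache hit: no print
    probe(1)  # new index: prints again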
ultralytics/utils/triton.py
@@ -53,7 +53,7 @@ class TritonRemoteModel:
         """
         if not endpoint and not scheme:  # Parse all args from URL string
             splits = urlsplit(url)
-            endpoint = splits.path.strip("/").split("/")[0]
+            endpoint = splits.path.strip("/").split("/", 1)[0]
             scheme = splits.scheme
             url = splits.netloc