ultralytics-opencv-headless 8.4.8__py3-none-any.whl → 8.4.10__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
tests/test_cuda.py CHANGED
@@ -12,7 +12,7 @@ from ultralytics import YOLO
  from ultralytics.cfg import TASK2DATA, TASK2MODEL, TASKS
  from ultralytics.utils import ASSETS, IS_JETSON, WEIGHTS_DIR
  from ultralytics.utils.autodevice import GPUInfo
- from ultralytics.utils.checks import check_amp
+ from ultralytics.utils.checks import check_amp, check_tensorrt
  from ultralytics.utils.torch_utils import TORCH_1_13

  # Try to find idle devices if CUDA is available
@@ -91,6 +91,7 @@ def test_export_onnx_matrix(task, dynamic, int8, half, batch, simplify, nms):
  )
  def test_export_engine_matrix(task, dynamic, int8, half, batch):
      """Test YOLO model export to TensorRT format for various configurations and run inference."""
+     check_tensorrt()
      import tensorrt as trt

      is_trt10 = int(trt.__version__.split(".", 1)[0]) >= 10
@@ -120,7 +121,7 @@ def test_train():
      device = tuple(DEVICES) if len(DEVICES) > 1 else DEVICES[0]
      # NVIDIA Jetson only has one GPU and therefore skipping checks
      if not IS_JETSON:
-         results = YOLO(MODEL).train(data="coco8.yaml", imgsz=64, epochs=1, device=device, batch=15)
+         results = YOLO(MODEL).train(data="coco8.yaml", imgsz=64, epochs=1, device=device, batch=15, compile=True)
          results = YOLO(MODEL).train(data="coco128.yaml", imgsz=64, epochs=1, device=device, batch=15, val=False)
          visible = eval(os.environ["CUDA_VISIBLE_DEVICES"])
          assert visible == device, f"Passed GPUs '{device}', but used GPUs '{visible}'"
tests/test_python.py CHANGED
@@ -702,8 +702,7 @@ def test_yoloe(tmp_path):
      # Predict
      # text-prompts
      model = YOLO(WEIGHTS_DIR / "yoloe-11s-seg.pt")
-     names = ["person", "bus"]
-     model.set_classes(names, model.get_text_pe(names))
+     model.set_classes(["person", "bus"])
      model(SOURCE, conf=0.01)

      from ultralytics import YOLOE
ultralytics/__init__.py CHANGED
@@ -1,6 +1,6 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

- __version__ = "8.4.8"
+ __version__ = "8.4.10"

  import importlib
  import os
ultralytics/data/augment.py CHANGED
@@ -1745,7 +1745,7 @@ class CopyPaste(BaseMixTransform):
          instances.convert_bbox(format="xyxy")
          instances.denormalize(w, h)

-         im_new = np.zeros(im.shape, np.uint8)
+         im_new = np.zeros(im.shape[:2], np.uint8)
          instances2 = labels2.pop("instances", None)
          if instances2 is None:
              instances2 = deepcopy(instances)
@@ -1758,7 +1758,7 @@ class CopyPaste(BaseMixTransform):
          for j in indexes[: round(self.p * n)]:
              cls = np.concatenate((cls, labels2.get("cls", cls)[[j]]), axis=0)
              instances = Instances.concatenate((instances, instances2[[j]]), axis=0)
-             cv2.drawContours(im_new, instances2.segments[[j]].astype(np.int32), -1, (1, 1, 1), cv2.FILLED)
+             cv2.drawContours(im_new, instances2.segments[[j]].astype(np.int32), -1, 1, cv2.FILLED)

          result = labels2.get("img", cv2.flip(im, 1))  # augment segments
          if result.ndim == 2:  # cv2.flip would eliminate the last dimension for grayscale images
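
The CopyPaste change above draws the pasted-object contours into a single-channel mask (im.shape[:2]) with the scalar color 1 instead of a 3-channel image with (1, 1, 1). A minimal standalone sketch of the same OpenCV call, not the CopyPaste code itself; the mask size and polygon below are purely illustrative:

import cv2
import numpy as np

h, w = 480, 640
im_new = np.zeros((h, w), np.uint8)  # single-channel mask, one byte per pixel
segment = np.array([[100, 100], [200, 100], [150, 200]], dtype=np.int32)  # one triangular polygon, shape (N, 2)
cv2.drawContours(im_new, [segment], -1, 1, cv2.FILLED)  # scalar color 1 is valid for a single-channel image
assert im_new.dtype == np.uint8 and im_new.max() == 1  # filled region is marked with 1
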
ultralytics/engine/exporter.py CHANGED
@@ -87,7 +87,6 @@ from ultralytics.utils import (
      IS_COLAB,
      IS_DEBIAN_BOOKWORM,
      IS_DEBIAN_TRIXIE,
-     IS_DOCKER,
      IS_JETSON,
      IS_RASPBERRYPI,
      IS_UBUNTU,
@@ -103,13 +102,16 @@ from ultralytics.utils import (
      callbacks,
      colorstr,
      get_default_args,
+     is_jetson,
  )
  from ultralytics.utils.checks import (
      IS_PYTHON_3_10,
      IS_PYTHON_MINIMUM_3_9,
      check_apt_requirements,
+     check_executorch_requirements,
      check_imgsz,
      check_requirements,
+     check_tensorrt,
      check_version,
      is_intel,
      is_sudo_available,
@@ -1002,12 +1004,15 @@ class Exporter:
          assert self.im.device.type != "cpu", "export running on CPU but must be on GPU, i.e. use 'device=0'"
          f_onnx = self.export_onnx()  # run before TRT import https://github.com/ultralytics/ultralytics/issues/7016

+         # Force re-install TensorRT on CUDA 13 ARM devices to 10.15.x versions for RT-DETR exports
+         # https://github.com/ultralytics/ultralytics/issues/22873
+         if is_jetson(jetpack=7):
+             check_tensorrt("10.15")
+
          try:
              import tensorrt as trt
          except ImportError:
-             if LINUX:
-                 cuda_version = torch.version.cuda.split(".")[0]
-                 check_requirements(f"tensorrt-cu{cuda_version}>7.0.0,!=10.1.0")
+             check_tensorrt()
              import tensorrt as trt
          check_version(trt.__version__, ">=7.0.0", hard=True)
          check_version(trt.__version__, "!=10.1.0", msg="https://github.com/ultralytics/ultralytics/pull/14239")
@@ -1197,16 +1202,9 @@ class Exporter:
          following Ultralytics conventions.
          """
          LOGGER.info(f"\n{prefix} starting export with ExecuTorch...")
-         assert TORCH_2_9, f"ExecuTorch export requires torch>=2.9.0 but torch=={TORCH_VERSION} is installed"
-
-         # BUG executorch build on arm64 Docker requires packaging>=22.0 https://github.com/pypa/setuptools/issues/4483
-         if LINUX and ARM64 and IS_DOCKER:
-             check_requirements("packaging>=22.0")
+         assert TORCH_2_9, f"ExecuTorch requires torch>=2.9.0 but torch=={TORCH_VERSION} is installed"

-         check_requirements("ruamel.yaml<0.19.0")
-         check_requirements("executorch==1.0.1", "flatbuffers")
-         # Pin numpy to avoid coremltools errors with numpy>=2.4.0, must be separate
-         check_requirements("numpy<=2.3.5")
+         check_executorch_requirements()

          from executorch.backends.xnnpack.partition.xnnpack_partitioner import XnnpackPartitioner
          from executorch.exir import to_edge_transform_and_lower
@@ -1402,7 +1400,7 @@ class Exporter:
          nms.confidenceThresholdInputFeatureName = "confidenceThreshold"
          nms.iouThreshold = self.args.iou
          nms.confidenceThreshold = self.args.conf
-         nms.pickTop.perClass = True
+         nms.pickTop.perClass = not self.args.agnostic_nms
          nms.stringClassLabels.vector.extend(names.values())
          nms_model = ct.models.MLModel(nms_spec)
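
In the exporter, TensorRT availability is now handled by check_tensorrt(), and JetPack 7 (CUDA 13 ARM) devices get TensorRT pinned to 10.15.x before export. A hedged usage sketch of triggering that engine export path; the checkpoint name and device index are only examples:

from ultralytics import YOLO

model = YOLO("yolo11n.pt")  # any detection checkpoint should work here
# format="engine" goes through Exporter.export_engine, which now calls check_tensorrt()
# (and check_tensorrt("10.15") first on JetPack 7 devices) before importing tensorrt
model.export(format="engine", device=0, half=True)  # requires a CUDA machine at runtime
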
 
ultralytics/hub/session.py CHANGED
@@ -12,7 +12,7 @@ from urllib.parse import parse_qs, urlparse

  from ultralytics import __version__
  from ultralytics.hub.utils import HELP_MSG, HUB_WEB_ROOT, PREFIX
- from ultralytics.utils import IS_COLAB, LOGGER, SETTINGS, TQDM, checks, emojis
+ from ultralytics.utils import IS_COLAB, LOGGER, SETTINGS, TQDM, checks
  from ultralytics.utils.errors import HUBModelError

  AGENT_NAME = f"python-{__version__}-colab" if IS_COLAB else f"python-{__version__}-local"
@@ -121,7 +121,7 @@ class HUBTrainingSession:
          """
          self.model = self.client.model(model_id)
          if not self.model.data:  # then model does not exist
-             raise ValueError(emojis("❌ The specified HUB model does not exist"))  # TODO: improve error handling
+             raise HUBModelError(f"❌ Model not found: '{model_id}'. Verify the model ID is correct.")

          self.model_url = f"{HUB_WEB_ROOT}/models/{self.model.id}"
          if self.model.is_trained():
@@ -167,10 +167,8 @@ class HUBTrainingSession:

          self.model.create_model(payload)

-         # Model could not be created
-         # TODO: improve error handling
          if not self.model.id:
-             return None
+             raise HUBModelError(f"❌ Failed to create model '{self.filename}' on Ultralytics HUB. Please try again.")

          self.model_url = f"{HUB_WEB_ROOT}/models/{self.model.id}"
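
Missing or un-creatable HUB models now raise HUBModelError instead of a generic ValueError or a silent None return, so callers can catch a single exception type. A rough sketch, assuming HUBTrainingSession.create_session is the entry point (the model ID is a placeholder):

from ultralytics.hub.session import HUBTrainingSession
from ultralytics.utils.errors import HUBModelError

try:
    # "MODEL_ID" is hypothetical; a bad or deleted ID now raises instead of failing silently
    session = HUBTrainingSession.create_session("MODEL_ID")
except HUBModelError as e:
    print(f"HUB model unavailable: {e}")
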
 
ultralytics/models/sam/predict.py CHANGED
@@ -2619,7 +2619,7 @@ class SAM3VideoSemanticPredictor(SAM3SemanticPredictor):
          if not isinstance(orig_imgs, list):  # input images are a torch.Tensor, not a list
              orig_imgs = ops.convert_torch2numpy_batch(orig_imgs)

-         names = []
+         names = self.model.names if self.model.names != "visual" else {}
          if len(curr_obj_ids) == 0:
              pred_masks, pred_boxes = None, torch.zeros((0, 7), device=self.device)
          else:
@@ -2638,6 +2638,8 @@ class SAM3VideoSemanticPredictor(SAM3SemanticPredictor):
              pred_boxes = torch.cat(
                  [pred_boxes, pred_ids[keep][:, None], pred_scores[keep][..., None], pred_cls[keep][..., None]], dim=-1
              )
+             if pred_boxes.shape[0]:
+                 names = names or dict(enumerate(str(i) for i in range(pred_boxes[:, 6].int().max() + 1)))
              if pred_masks.shape[0] > 1:
                  tracker_scores = torch.tensor(
                      [
@@ -2657,7 +2659,6 @@ class SAM3VideoSemanticPredictor(SAM3SemanticPredictor):
                          background_value=0,
                      ).squeeze(1)
                  ) > 0
-             names = self.model.names or dict(enumerate(str(i) for i in range(pred_boxes[:, 6].int().max())))

          results = []
          for masks, boxes, orig_img, img_path in zip([pred_masks], [pred_boxes], orig_imgs, self.batch[0]):
ultralytics/models/yolo/detect/train.py CHANGED
@@ -73,7 +73,7 @@ class DetectionTrainer(BaseTrainer):
          Returns:
              (Dataset): YOLO dataset object configured for the specified mode.
          """
-         gs = max(int(unwrap_model(self.model).stride.max() if self.model else 0), 32)
+         gs = max(int(unwrap_model(self.model).stride.max()), 32)
          return build_yolo_dataset(self.args, img_path, batch, self.data, mode=mode, rect=mode == "val", stride=gs)

      def get_dataloader(self, dataset_path: str, batch_size: int = 16, rank: int = 0, mode: str = "train"):
ultralytics/models/yolo/model.py CHANGED
@@ -426,5 +426,6 @@ class YOLOE(Model):
              self.predictor = None  # reset predictor
          elif isinstance(self.predictor, yolo.yoloe.YOLOEVPDetectPredictor):
              self.predictor = None  # reset predictor if no visual prompts
+         self.overrides["agnostic_nms"] = True  # use agnostic nms for YOLOE default

          return super().predict(source, stream, **kwargs)
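
YOLOE prediction now defaults to class-agnostic NMS via self.overrides; an explicit per-call keyword should still take precedence, since predict kwargs are merged over the model overrides. A hedged sketch mirroring the test above (checkpoint name and image URL are only examples):

from ultralytics import YOLO

model = YOLO("yoloe-11s-seg.pt")  # text-prompt YOLOE checkpoint, as used in tests/test_python.py above
model.set_classes(["person", "bus"])
results = model.predict("https://ultralytics.com/images/bus.jpg")                      # agnostic NMS by default now
results = model.predict("https://ultralytics.com/images/bus.jpg", agnostic_nms=False)  # per-call override, assumed to win
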
ultralytics/nn/autobackend.py CHANGED
@@ -16,8 +16,24 @@ import torch
  import torch.nn as nn
  from PIL import Image

- from ultralytics.utils import ARM64, IS_JETSON, LINUX, LOGGER, PYTHON_VERSION, ROOT, YAML, is_jetson
- from ultralytics.utils.checks import check_requirements, check_suffix, check_version, check_yaml, is_rockchip
+ from ultralytics.utils import (
+     ARM64,
+     IS_JETSON,
+     LINUX,
+     LOGGER,
+     PYTHON_VERSION,
+     ROOT,
+     YAML,
+     is_jetson,
+ )
+ from ultralytics.utils.checks import (
+     check_executorch_requirements,
+     check_requirements,
+     check_suffix,
+     check_version,
+     check_yaml,
+     is_rockchip,
+ )
  from ultralytics.utils.downloads import attempt_download_asset, is_url
  from ultralytics.utils.nms import non_max_suppression

@@ -323,7 +339,7 @@ class AutoBackend(nn.Module):
              batch = metadata["batch"]
              dynamic = metadata.get("args", {}).get("dynamic", dynamic)
              # OpenVINO inference modes are 'LATENCY', 'THROUGHPUT' (not recommended), or 'CUMULATIVE_THROUGHPUT'
-             inference_mode = "CUMULATIVE_THROUGHPUT" if batch > 1 and dynamic else "LATENCY"
+             inference_mode = "CUMULATIVE_THROUGHPUT" if dynamic and batch > 1 else "LATENCY"
              ov_compiled_model = core.compile_model(
                  ov_model,
                  device_name=device_name,
@@ -616,9 +632,9 @@ class AutoBackend(nn.Module):
          # ExecuTorch
          elif pte:
              LOGGER.info(f"Loading {w} for ExecuTorch inference...")
-             # TorchAO release compatibility table bug https://github.com/pytorch/ao/issues/2919
-             check_requirements("setuptools<71.0.0")  # Setuptools bug: https://github.com/pypa/setuptools/issues/4483
-             check_requirements(("executorch==1.0.1", "flatbuffers"))
+
+             check_executorch_requirements()
+
              from executorch.runtime import Runtime

              w = Path(w)
ultralytics/utils/__init__.py CHANGED
@@ -762,7 +762,7 @@ def is_jetson(jetpack=None) -> bool:
      if jetson and jetpack:
          try:
              content = open("/etc/nv_tegra_release").read()
-             version_map = {4: "R32", 5: "R35", 6: "R36"}  # JetPack to L4T major version mapping
+             version_map = {4: "R32", 5: "R35", 6: "R36", 7: "R38"}  # JetPack to L4T major version mapping
              return jetpack in version_map and version_map[jetpack] in content
          except Exception:
              return False
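
JetPack 7 maps to L4T release R38, so is_jetson(jetpack=7) can now gate JetPack-7-specific behaviour such as the TensorRT 10.15 pin in the exporter above. A small sketch:

from ultralytics.utils import is_jetson

if is_jetson(jetpack=7):  # True only when /etc/nv_tegra_release reports an R38 (JetPack 7) L4T build
    print("Running on a CUDA 13 ARM Jetson (JetPack 7)")
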
ultralytics/utils/checks.py CHANGED
@@ -29,6 +29,7 @@ from ultralytics.utils import (
      AUTOINSTALL,
      GIT,
      IS_COLAB,
+     IS_DOCKER,
      IS_JETSON,
      IS_KAGGLE,
      IS_PIP_PACKAGE,
@@ -495,6 +496,28 @@ def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=()
      return True


+ def check_executorch_requirements():
+     """Check and install ExecuTorch requirements including platform-specific dependencies."""
+     # BUG executorch build on arm64 Docker requires packaging>=22.0 https://github.com/pypa/setuptools/issues/4483
+     if LINUX and ARM64 and IS_DOCKER:
+         check_requirements("packaging>=22.0")
+
+     check_requirements("executorch", cmds=f"torch=={TORCH_VERSION.split('+')[0]}")
+     # Pin numpy to avoid coremltools errors with numpy>=2.4.0, must be separate
+     check_requirements("numpy<=2.3.5")
+
+
+ def check_tensorrt(min_version: str = "7.0.0"):
+     """Check and install TensorRT requirements including platform-specific dependencies.
+
+     Args:
+         min_version (str): Minimum supported TensorRT version (default: "7.0.0").
+     """
+     if LINUX:
+         cuda_version = torch.version.cuda.split(".")[0]
+         check_requirements(f"tensorrt-cu{cuda_version}>={min_version},!=10.1.0")
+
+
  def check_torchvision():
      """Check the installed versions of PyTorch and Torchvision to ensure they're compatible.

@@ -546,7 +569,7 @@ def check_suffix(file="yolo26n.pt", suffix=".pt", msg=""):
              assert f".{s}" in suffix, f"{msg}{f} acceptable suffix is {suffix}, not .{s}"


- def check_yolov5u_filename(file: str, verbose: bool = True):
+ def check_yolov5u_filename(file: str, verbose: bool = True) -> str:
      """Replace legacy YOLOv5 filenames with updated YOLOv5u filenames.

      Args:
@@ -573,7 +596,7 @@ def check_yolov5u_filename(file: str, verbose: bool = True):
      return file


- def check_model_file_from_stem(model="yolo11n"):
+ def check_model_file_from_stem(model: str = "yolo11n") -> str | Path:
      """Return a model filename from a valid model stem.

      Args:
@@ -619,6 +642,9 @@ def check_file(file, suffix="", download=True, download_dir=".", hard=True):
          # Use URI path for unique directory structure: ul://user/project/model -> user/project/model/filename.pt
          uri_path = file[5:]  # Remove "ul://"
          local_file = Path(download_dir) / uri_path / url2file(url)
+         # Always re-download NDJSON datasets (cheap, ensures fresh data after updates)
+         if local_file.suffix == ".ndjson":
+             local_file.unlink(missing_ok=True)
          if local_file.exists():
              LOGGER.info(f"Found {clean_url(url)} locally at {local_file}")
          else:
@@ -660,7 +686,7 @@ def check_yaml(file, suffix=(".yaml", ".yml"), hard=True):
      return check_file(file, suffix, hard=hard)


- def check_is_path_safe(basedir, path):
+ def check_is_path_safe(basedir: Path | str, path: Path | str) -> bool:
      """Check if the resolved path is under the intended directory to prevent path traversal.

      Args:
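
The two new helpers centralize installer logic that was previously duplicated in the exporter and AutoBackend. A hedged usage sketch; the version details in the comments are only what the code above implies:

from ultralytics.utils.checks import check_executorch_requirements, check_tensorrt

check_tensorrt()         # on Linux with a CUDA build of torch: ensures tensorrt-cu{11,12,13}>=7.0.0,!=10.1.0
check_tensorrt("10.15")  # raises the floor, e.g. for RT-DETR exports on JetPack 7 devices
check_executorch_requirements()  # executorch matched to the local torch build, plus numpy<=2.3.5
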
ultralytics/utils/dist.py CHANGED
@@ -1,13 +1,19 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

+ from __future__ import annotations
+
  import os
  import shutil
  import sys
  import tempfile
+ from typing import TYPE_CHECKING

  from . import USER_CONFIG_DIR
  from .torch_utils import TORCH_1_9

+ if TYPE_CHECKING:
+     from ultralytics.engine.trainer import BaseTrainer
+

  def find_free_network_port() -> int:
      """Find a free port on localhost.
@@ -25,7 +31,7 @@ def find_free_network_port() -> int:
      return s.getsockname()[1]  # port


- def generate_ddp_file(trainer):
+ def generate_ddp_file(trainer: BaseTrainer) -> str:
      """Generate a DDP (Distributed Data Parallel) file for multi-GPU training.

      This function creates a temporary Python file that enables distributed training across multiple GPUs. The file
@@ -75,7 +81,7 @@ if __name__ == "__main__":
      return file.name


- def generate_ddp_command(trainer):
+ def generate_ddp_command(trainer: BaseTrainer) -> tuple[list[str], str]:
      """Generate command for distributed training.

      Args:
@@ -105,7 +111,7 @@ def generate_ddp_command(trainer):
      return cmd, file


- def ddp_cleanup(trainer, file):
+ def ddp_cleanup(trainer: BaseTrainer, file: str) -> None:
      """Delete temporary file if created during distributed data parallel (DDP) training.

      This function checks if the provided file contains the trainer's ID in its name, indicating it was created as a
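
The BaseTrainer import above is type-checking-only: with from __future__ import annotations the annotations stay strings at runtime, so no runtime import of ultralytics.engine.trainer (and no import cycle) is introduced. A minimal sketch of the same pattern using a hypothetical module pair:

# my_utils.py -- hypothetical module illustrating the pattern used in dist.py
from __future__ import annotations  # annotations are never evaluated at runtime

from typing import TYPE_CHECKING

if TYPE_CHECKING:  # only true for static type checkers (mypy, pyright), never at runtime
    from my_trainer import Trainer  # hypothetical heavy / circular import


def cleanup(trainer: Trainer, file: str) -> None:
    """Type checkers see Trainer; at runtime the annotation is just the string 'Trainer'."""
    print(f"cleaning {file} for {type(trainer).__name__}")
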
ultralytics/utils/instance.py CHANGED
@@ -408,7 +408,7 @@ class Instances:
          good = self.bbox_areas > 0
          if not all(good):
              self._bboxes = self._bboxes[good]
-             if len(self.segments):
+             if self.segments is not None and len(self.segments):
                  self.segments = self.segments[good]
              if self.keypoints is not None:
                  self.keypoints = self.keypoints[good]
@@ -482,3 +482,16 @@ class Instances:
      def bboxes(self) -> np.ndarray:
          """Return bounding boxes."""
          return self._bboxes.bboxes
+
+     def __repr__(self) -> str:
+         """Return a string representation of the Instances object."""
+         # Map private to public names and include direct attributes
+         attr_map = {"_bboxes": "bboxes"}
+         parts = []
+         for key, value in self.__dict__.items():
+             name = attr_map.get(key, key)
+             if name == "bboxes":
+                 value = self.bboxes  # Use the property
+             if value is not None:
+                 parts.append(f"{name}={value!r}")
+         return "Instances({})".format("\n".join(parts))
ultralytics/utils/loss.py CHANGED
@@ -1193,8 +1193,6 @@ class TVPDetectLoss:

      def loss(self, preds: dict[str, torch.Tensor], batch: dict[str, torch.Tensor]) -> tuple[torch.Tensor, torch.Tensor]:
          """Calculate the loss for text-visual prompt detection."""
-         assert self.ori_reg_max == self.vp_criterion.reg_max  # TODO: remove it
-
          if self.ori_nc == preds["scores"].shape[1]:
              loss = torch.zeros(3, device=self.vp_criterion.device, requires_grad=True)
              return loss, loss.detach()
@@ -1230,8 +1228,6 @@ class TVPSegmentLoss(TVPDetectLoss):

      def loss(self, preds: Any, batch: dict[str, torch.Tensor]) -> tuple[torch.Tensor, torch.Tensor]:
          """Calculate the loss for text-visual prompt detection."""
-         assert self.ori_reg_max == self.vp_criterion.reg_max  # TODO: remove it
-
          if self.ori_nc == preds["scores"].shape[1]:
              loss = torch.zeros(4, device=self.vp_criterion.device, requires_grad=True)
              return loss, loss.detach()
ultralytics/utils/torch_utils.py CHANGED
@@ -46,6 +46,7 @@ TORCH_2_1 = check_version(TORCH_VERSION, "2.1.0")
  TORCH_2_4 = check_version(TORCH_VERSION, "2.4.0")
  TORCH_2_8 = check_version(TORCH_VERSION, "2.8.0")
  TORCH_2_9 = check_version(TORCH_VERSION, "2.9.0")
+ TORCH_2_10 = check_version(TORCH_VERSION, "2.10.0")
  TORCHVISION_0_10 = check_version(TORCHVISION_VERSION, "0.10.0")
  TORCHVISION_0_11 = check_version(TORCHVISION_VERSION, "0.11.0")
  TORCHVISION_0_13 = check_version(TORCHVISION_VERSION, "0.13.0")
ultralytics_opencv_headless-8.4.8.dist-info/METADATA → ultralytics_opencv_headless-8.4.10.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ultralytics-opencv-headless
- Version: 8.4.8
+ Version: 8.4.10
  Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
  Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -39,8 +39,8 @@ Requires-Dist: pillow>=7.1.2
  Requires-Dist: pyyaml>=5.3.1
  Requires-Dist: requests>=2.23.0
  Requires-Dist: scipy>=1.4.1
- Requires-Dist: torch<2.10,>=1.8.0
- Requires-Dist: torch!=2.4.0,<2.10,>=1.8.0; sys_platform == "win32"
+ Requires-Dist: torch>=1.8.0
+ Requires-Dist: torch!=2.4.0,>=1.8.0; sys_platform == "win32"
  Requires-Dist: torchvision>=0.9.0
  Requires-Dist: psutil>=5.8.0
  Requires-Dist: polars>=0.20.0
ultralytics_opencv_headless-8.4.8.dist-info/RECORD → ultralytics_opencv_headless-8.4.10.dist-info/RECORD CHANGED
@@ -1,13 +1,13 @@
  tests/__init__.py,sha256=hfUXxYLJB3846OCzWV94ZKEZsi8vq9Pqrdd2mMgjjck,804
  tests/conftest.py,sha256=rlKyDuOC_3ptXrWS8Q19bNEGOupUmYXHj3nB6o1GBGY,2318
  tests/test_cli.py,sha256=-OrAcZlcJ07UPagjSOlR8qXP5gNFHaTYcW3paOTURAE,5725
- tests/test_cuda.py,sha256=2TBe-ZkecMOGPWLdHcbsAjH3m9c5SQJ2KeyICgS0aeo,8426
+ tests/test_cuda.py,sha256=1CSODefiLsbkYUJ34Bdg5c6w50WNoqdoLBuXxWP0Ewo,8477
  tests/test_engine.py,sha256=ufSn3X4kL_Lpn2O25jKAfw_9QwHTMRjP9shDdpgBqnY,5740
  tests/test_exports.py,sha256=pZZJBN2uM5QdQMjnjIC-xZkKPOBbnnX8b5d5q90otl4,15651
  tests/test_integrations.py,sha256=FjvTGjXm3bvYHK3_obgObhC5SzHCTzw4aOJV9Hh08jQ,6220
- tests/test_python.py,sha256=BTyRn29boDKu4n0v1_5D3_7wvADs077NU9RFdTZktHo,30774
+ tests/test_python.py,sha256=amdS9eDhjpiN0aVc5d8awxaTYjIZUlfV909ykhhD7W8,30730
  tests/test_solutions.py,sha256=1tRlM72YciE42Nk9v83gsXOD5RSx9GSWVsKGhH7-HxE,14122
- ultralytics/__init__.py,sha256=jfmOTtuFV9ofd_zpWZoaGtHeh3SmmK1zHx1iu3QnbI4,1300
+ ultralytics/__init__.py,sha256=FtbFMrML8PlTVGpQeRFnqV_TMA_yiVxOuXp7vH766ng,1301
  ultralytics/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
  ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
  ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
@@ -119,7 +119,7 @@ ultralytics/cfg/trackers/botsort.yaml,sha256=tRxC-qT4Wz0mLn5x7ZEwrqgGKrmTDVY7gMg
  ultralytics/cfg/trackers/bytetrack.yaml,sha256=7LS1ObP5u7BUFcmeY6L2m3bRuPUktnpJspFKd_ElVWc,908
  ultralytics/data/__init__.py,sha256=ToR8zl0JhBHy42ZvV7zIwO_F3lbi5oNlGQNPK3dlddU,644
  ultralytics/data/annotator.py,sha256=iu1En-LzlR4RyR3ocftthnAog_peQHV9ForPRo_QcX8,2985
- ultralytics/data/augment.py,sha256=XR52_BEmwFOrdMxEVRypm_kz6ROkTBgVped05R2xZWs,128566
+ ultralytics/data/augment.py,sha256=z11SV8ikxHN59_ebvX_45dXH7iX1f8RG1MtANfdFK5E,128562
  ultralytics/data/base.py,sha256=pMs8yJOmAFPXdgfLCDtUemSvkPNDzxReP-fWzkNtonc,19723
  ultralytics/data/build.py,sha256=s-tkSZPf3OfQyfXPXB9XxdW_gIcU6Xy_u21ekSgTnRo,17205
  ultralytics/data/converter.py,sha256=4SwrEKzsdKK3YcoCcEhu0_UmFyaUuQEVPIWENFxlAC4,34520
@@ -133,7 +133,7 @@ ultralytics/data/scripts/get_coco.sh,sha256=UuJpJeo3qQpTHVINeOpmP0NYmg8PhEFE3A8J
  ultralytics/data/scripts/get_coco128.sh,sha256=qmRQl_hOKrsdHrTrnyQuFIH01oDz3lfaz138OgGfLt8,650
  ultralytics/data/scripts/get_imagenet.sh,sha256=hr42H16bM47iT27rgS7MpEo-GeOZAYUQXgr0B2cwn48,1705
  ultralytics/engine/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
- ultralytics/engine/exporter.py,sha256=y76PH93ULLplU8YvKh2reDJ9QWXjCkQRlusD6p9-NPg,73566
+ ultralytics/engine/exporter.py,sha256=5cAqRDaP6_4ERtvCNVWvimwSCqBs_WLUB2n0G4aXADo,73323
  ultralytics/engine/model.py,sha256=euDHUy7J5vVBvS_d-KbGZd_0BP5bF6Y3cTQ7VXtwZ4k,53210
  ultralytics/engine/predictor.py,sha256=x3xzVlfj92HgLdxPvoKFKpyzp1wSsNVCahpbO5sse80,23102
  ultralytics/engine/results.py,sha256=Lg-Ke8TU6qaxu0wQtOH26unORj4FRYxd8RL0VxV74Zw,68333
@@ -142,7 +142,7 @@ ultralytics/engine/tuner.py,sha256=RDiEWqADVutVDXRHvZIes8QqLUFnffXFXkXk4clfEuQ,2
  ultralytics/engine/validator.py,sha256=BoQ8mc-OLdAKCaS6ikL0MJf2LQVkNP1oN44ZCqkOx-g,18045
  ultralytics/hub/__init__.py,sha256=Z0K_E00jzQh90b18q3IDChwVmTvyIYp6C00sCV-n2F8,6709
  ultralytics/hub/auth.py,sha256=ANzCeZA7lUzTWc_sFHbDuuyBh1jLl2sTpHkoUbIkFYE,6254
- ultralytics/hub/session.py,sha256=OzBXAL9R135gRDdfNYUqyiSrxOyaiMFCVYSZua99sF0,18364
+ ultralytics/hub/session.py,sha256=OGk7-9alEFf23pxc-X7ethM5sVKrRdL2FY5nzWcS3IA,18363
  ultralytics/hub/utils.py,sha256=jknll06yNaAxKyOqKliILJv1XOU39WJWOGG_DyFUh20,6353
  ultralytics/hub/google/__init__.py,sha256=r06Ld4TuZEBOqg4iagpeN-eMAkg43T2OTxOH4_7IfkM,8445
  ultralytics/models/__init__.py,sha256=ljus_u1CIuP99k9fu6sCtzIeFZ-TCE28NZ8kefZHFNY,309
@@ -165,7 +165,7 @@ ultralytics/models/sam/amg.py,sha256=aYvJ7jQMkTR3X9KV7SHi3qP3yNchQggWNUurTRZwxQg
  ultralytics/models/sam/build.py,sha256=rEaFXA4R1nyutSonIenRKcuNtO1FgEojnkcayo0FTP4,12867
  ultralytics/models/sam/build_sam3.py,sha256=Gg_LiqNrCDTYaDWrob05vj-ln2AhkfMa5KkKhyk5wdE,11976
  ultralytics/models/sam/model.py,sha256=cOawDSkFqJPbt3455aTZ8tjaoWshFWFHQGGqxzsL_QQ,7372
- ultralytics/models/sam/predict.py,sha256=k4eTU3g7ihvAn-moBpzR4ox1GUlOEHVQDzywbnheFFM,203651
+ ultralytics/models/sam/predict.py,sha256=YvtSsyfdjwz24ecSMEU0pE9Y2wV320kG7UPeP8V8_fY,203734
  ultralytics/models/sam/modules/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
  ultralytics/models/sam/modules/blocks.py,sha256=ZU2aY4h6fmosj5pZ5EOEuO1O8Cl8UYeH11eOxkqCt8M,44570
  ultralytics/models/sam/modules/decoders.py,sha256=G4li37ahUe5rTTNTKibWMsAoz6G3R18rI8OPvfunVX8,25045
@@ -190,14 +190,14 @@ ultralytics/models/utils/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXp
  ultralytics/models/utils/loss.py,sha256=9CcqRXDj5-I-7eZuenInvyoLcPf22Ynf3rUFA5V22bI,21131
  ultralytics/models/utils/ops.py,sha256=z-Ebjv_k14bWOoP6nszDzDBiy3yELcVtbj6M8PsRpvE,15207
  ultralytics/models/yolo/__init__.py,sha256=YD407NDDiyjo0x_MR6usJaTpePKPgsfBUYehlCw7lRs,307
- ultralytics/models/yolo/model.py,sha256=HXkglzJQqW1x7MJaKavI5aasA-0lSH21Xcv_dac3SFU,18504
+ ultralytics/models/yolo/model.py,sha256=vLXTLDMjFTS7sD_Cif1Oc79OhhRVwwUMozVJeaslASg,18588
  ultralytics/models/yolo/classify/__init__.py,sha256=9--HVaNOfI1K7rn_rRqclL8FUAnpfeBrRqEQIaQw2xM,383
  ultralytics/models/yolo/classify/predict.py,sha256=HCStYkSqeg32SNTWfr4FDCkUMQ4wnKqceUK3T995us4,4137
  ultralytics/models/yolo/classify/train.py,sha256=xPlpioQFPeH32Frhy9ZbbGV_wcpn9hPB4EB4N0Kw-DE,9614
  ultralytics/models/yolo/classify/val.py,sha256=akH2P3nff4oiZtV2toKB3Z9HIbsVcwsb1uvDwhamszw,10503
  ultralytics/models/yolo/detect/__init__.py,sha256=GIRsLYR-kT4JJx7lh4ZZAFGBZj0aebokuU0A7JbjDVA,257
  ultralytics/models/yolo/detect/predict.py,sha256=2nxlMyw_zVKq1aeJFRTgb4EGL2vOFq4pLT9tArHBfF8,5385
- ultralytics/models/yolo/detect/train.py,sha256=N6Sdjnue9-bpnBMP5KGwsH9BFgjL23N9kDaHiXTBj9c,10757
+ ultralytics/models/yolo/detect/train.py,sha256=9JwTYi6M33cGhmAmdl099Bjrjb7woqu7fJSJgoivubk,10736
  ultralytics/models/yolo/detect/val.py,sha256=54AOR6r3istE0pILJ1v4xzPdv7UcvtTEZ6E5OGj3Jgc,22818
  ultralytics/models/yolo/obb/__init__.py,sha256=tQmpG8wVHsajWkZdmD6cjGohJ4ki64iSXQT8JY_dydo,221
  ultralytics/models/yolo/obb/predict.py,sha256=I7hWDr1zuy2WuwGom9uzXqomfr7qVMWb7iRl18xdTYw,2577
@@ -220,7 +220,7 @@ ultralytics/models/yolo/yoloe/train.py,sha256=q7K1fiqKrpbjfrrd3F3FiVMPtQAVuVzQin
  ultralytics/models/yolo/yoloe/train_seg.py,sha256=rV2Jnbuh6vvBMaupaZK_aRXBMevO0XhN2VUR43ZwlIY,5285
  ultralytics/models/yolo/yoloe/val.py,sha256=utUFWeFKRFWZrPr1y3A8ztbTwdoWMYqzlwBN7CQ0tCA,9418
  ultralytics/nn/__init__.py,sha256=538LZPUKKvc3JCMgiQ4VLGqRN2ZAaVLFcQbeNNHFkEA,545
- ultralytics/nn/autobackend.py,sha256=c3FzMw-0h5wEoxg0-n7rMWrIcR6C1WTNjF1AUpW07rM,45079
+ ultralytics/nn/autobackend.py,sha256=XNMUZbwcDtFLtWFohiWH6lufxhipjhVwN_SDzqnifg4,44939
  ultralytics/nn/tasks.py,sha256=xclS6E6OIBDurrDscTVmVafvmd8JOIiagIT4iEGwD4M,72588
  ultralytics/nn/text_model.py,sha256=c--WzxjFEDb7p95u3YGcSsJLjj91zFNqXshij8Evrwg,15291
  ultralytics/nn/modules/__init__.py,sha256=9KyQBxpomp5uJJ1PvMGuOFs2pR3NpqZcFHJlM6Q56c0,3322
@@ -262,28 +262,28 @@ ultralytics/trackers/utils/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6D
  ultralytics/trackers/utils/gmc.py,sha256=cvvhNXOhylVQti4pJQSNPx4yPqhhhw1k2yzY0JFl7Zo,13760
  ultralytics/trackers/utils/kalman_filter.py,sha256=crgysL2bo0v1eTljOlP2YqIJDLBcHjl75MRpbxfaR_M,21514
  ultralytics/trackers/utils/matching.py,sha256=x6uZOIx0O9oVmAcfY6tYMTJQE2cDTUlRR690Y5UkHLs,7129
- ultralytics/utils/__init__.py,sha256=XLEK_pvptzNWhJaO8x0MWghREIyEDei0LOGnUnmU1Kg,55145
+ ultralytics/utils/__init__.py,sha256=OnFc5uhU296r9v-RYp1q-qq8HY8CjELvJU3qHJRCznM,55155
  ultralytics/utils/autobatch.py,sha256=jiE4m_--H9UkXFDm_FqzcZk_hSTCGpS72XdVEKgZwAo,5114
  ultralytics/utils/autodevice.py,sha256=rXlPuo-iX-vZ4BabmMGEGh9Uxpau4R7Zlt1KCo9Xfyc,8892
  ultralytics/utils/benchmarks.py,sha256=y3aZ05qQhS2C3WI-iPeByOfmcaLLfXabsEufvXIv8lI,31819
- ultralytics/utils/checks.py,sha256=NWc0J-Nk4qHSVEXFDWfJkI7IjTNHFXajKjsSodDroBk,39411
+ ultralytics/utils/checks.py,sha256=zg8BAIcTS4glCuDvFf3-7l7LZ0QsSog5tZNhh84psos,40589
  ultralytics/utils/cpu.py,sha256=OksKOlX93AsbSsFuoYvLXRXgpkOibrZSwQyW6lipt4Q,3493
- ultralytics/utils/dist.py,sha256=sktf2a_uh-vLg6piQyiuRJ5JcMggFYmhS8Wepnb88WM,4220
+ ultralytics/utils/dist.py,sha256=GpdZLU3VQomg_dbHNMbzIgat-Y409plwcZJN5nF3YrU,4447
  ultralytics/utils/downloads.py,sha256=TWXkYwR5hEpVMWL6fbjdywDmZe02WhyL_8YuLVce-uM,23069
  ultralytics/utils/errors.py,sha256=dUZcTWpbJJHqEuWHM6IbeoJJ4TzA_yHBP8E7tEEpBVs,1388
  ultralytics/utils/events.py,sha256=6vqs_iSxoXIhQ804sOjApNZmXwNW9FUFtjaHPY8ta10,4665
  ultralytics/utils/files.py,sha256=u7pjz13wgkLSBfe_beeZrzar32_gaJWoIVa3nvY3mh8,8190
  ultralytics/utils/git.py,sha256=UdqeIiiEzg1qkerAZrg5YtTYPuJYwrpxW9N_6Pq6s8U,5501
- ultralytics/utils/instance.py,sha256=11mhefvTI9ftMqSirXuiViAi0Fxlo6v84qvNxfRNUoE,18862
+ ultralytics/utils/instance.py,sha256=aHBD5F8tJvll5pPfWlXFIXyCx5aYboaA1V9xCgh5V60,19442
  ultralytics/utils/logger.py,sha256=T5iaNnaqbCvx_FZf1dhVkr5FVxyxb4vO17t4SJfCIhg,19132
- ultralytics/utils/loss.py,sha256=7Z-CDlgsRldDart8j7ZjKot7TSj57IIwGj8C6QjTLx0,57003
+ ultralytics/utils/loss.py,sha256=h_BxLJRjaucZzaoGjMgpTvBR6HCn-MI209aQC2VeJeQ,56841
  ultralytics/utils/metrics.py,sha256=puMGn1LfVIlDvx5K7US4RtK8HYW6cRl9OznfV0nUPvk,69261
  ultralytics/utils/nms.py,sha256=zv1rOzMF6WU8Kdk41VzNf1H1EMt_vZHcbDFbg3mnN2o,14248
  ultralytics/utils/ops.py,sha256=4xqb7kwrAWm8c_zxOWP5JoXozgsA1Slk2s4XFwmEZCs,26089
  ultralytics/utils/patches.py,sha256=yXkznJNo3M74gvvzWmHoZYbWFu-KnO3KK4usbmey8H0,8521
  ultralytics/utils/plotting.py,sha256=_iXs4gs8tzMSgiKxCriD4un-MJkOsC3lGSy0wn7qZGk,48433
  ultralytics/utils/tal.py,sha256=9BSRgsYj0Llq7r5vOzkXDKUjfoTZsxiH92U09c6DtoU,24540
- ultralytics/utils/torch_utils.py,sha256=W6OX8p3fI44gF0TUdPTLV5NZlTE03YdwDbcZXy_e05k,40279
+ ultralytics/utils/torch_utils.py,sha256=H0ykzePdr55qPndFS9VVQCFH-fovbpK_uVBz4ooLvM8,40331
  ultralytics/utils/tqdm.py,sha256=f2W608Qpvgu6tFi28qylaZpcRv3IX8wTGY_8lgicaqY,16343
  ultralytics/utils/triton.py,sha256=BQu3CD3OlT76d1OtmnX5slQU37VC1kzRvEtfI2saIQA,5211
  ultralytics/utils/tuner.py,sha256=nRMmnyp0B0gVJzAXcpCxQUnwXjVp0WNiSJwxyR2xvQM,7303
@@ -303,9 +303,9 @@ ultralytics/utils/export/__init__.py,sha256=Cfh-PwVfTF_lwPp-Ss4wiX4z8Sm1XRPklsqd
  ultralytics/utils/export/engine.py,sha256=QoXPqnmQn6W5TOUAygOtCG63R9ExDG4-Df6X6W-_Mzo,10470
  ultralytics/utils/export/imx.py,sha256=VnMDO7c8ezBs91UDoLg9rR0oY8Uc7FujKpbdGxrzV18,13744
  ultralytics/utils/export/tensorflow.py,sha256=xHEcEM3_VeYctyqkJCpgkqcNie1M8xLqcFKr6uANEEQ,9951
- ultralytics_opencv_headless-8.4.8.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
- ultralytics_opencv_headless-8.4.8.dist-info/METADATA,sha256=XWxJ6mbIh2en0TbYqu7HVB4CNE-e8CkI_D8-aDojToM,39010
- ultralytics_opencv_headless-8.4.8.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
- ultralytics_opencv_headless-8.4.8.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
- ultralytics_opencv_headless-8.4.8.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
- ultralytics_opencv_headless-8.4.8.dist-info/RECORD,,
+ ultralytics_opencv_headless-8.4.10.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ ultralytics_opencv_headless-8.4.10.dist-info/METADATA,sha256=PHkk8v4-VHj9saUGpEMI700rLqTcq9z37unLYsL_Vsc,38999
+ ultralytics_opencv_headless-8.4.10.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ ultralytics_opencv_headless-8.4.10.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ ultralytics_opencv_headless-8.4.10.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ ultralytics_opencv_headless-8.4.10.dist-info/RECORD,,