ultralytics 8.3.202__py3-none-any.whl → 8.3.203__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
tests/test_cli.py CHANGED
@@ -1,13 +1,14 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
 
  import subprocess
+ from pathlib import Path
 
  import pytest
  from PIL import Image
 
  from tests import CUDA_DEVICE_COUNT, CUDA_IS_AVAILABLE, MODELS, TASK_MODEL_DATA
  from ultralytics.utils import ARM64, ASSETS, LINUX, WEIGHTS_DIR, checks
- from ultralytics.utils.torch_utils import TORCH_1_9
+ from ultralytics.utils.torch_utils import TORCH_1_11
 
 
  def run(cmd: str) -> None:
@@ -48,15 +49,12 @@ def test_export(model: str) -> None:
  run(f"yolo export model={model} format=torchscript imgsz=32")
 
 
- def test_rtdetr(task: str = "detect", model: str = "yolov8n-rtdetr.yaml", data: str = "coco8.yaml") -> None:
+ @pytest.mark.skipif(not TORCH_1_11, reason="RTDETR requires torch>=1.11")
+ def test_rtdetr(task: str = "detect", model: Path = WEIGHTS_DIR / "rtdetr-l.pt", data: str = "coco8.yaml") -> None:
  """Test the RTDETR functionality within Ultralytics for detection tasks using specified model and data."""
- # Warning: must use imgsz=640 (note also add comma, spaces, fraction=0.25 args to test single-image training)
- run(f"yolo train {task} model={model} data={data} --imgsz= 160 epochs =1, cache = disk fraction=0.25") # spaces
+ # Add comma, spaces, fraction=0.25 args to test single-image training
  run(f"yolo predict {task} model={model} source={ASSETS / 'bus.jpg'} imgsz=160 save save_crop save_txt")
- if TORCH_1_9:
- weights = WEIGHTS_DIR / "rtdetr-l.pt"
- run(f"yolo predict {task} model={weights} source={ASSETS / 'bus.jpg'} imgsz=160 save save_crop save_txt")
- run(f"yolo train {task} model={weights} epochs=1 imgsz=160 cache=disk data=coco8.yaml")
+ run(f"yolo train {task} model={model} data={data} --imgsz= 160 epochs =1, cache = disk fraction=0.25")
 
 
  @pytest.mark.skipif(checks.IS_PYTHON_3_12, reason="MobileSAM with CLIP is not supported in Python 3.12")
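These CLI tests drive the `yolo` entry point through a small subprocess wrapper rather than the Python API. As a rough, hedged illustration (not the project's actual helper), `run()` only needs to be something like the sketch below, assuming `ultralytics` is installed so the `yolo` command is on PATH:

```python
import shlex
import subprocess


def run(cmd: str) -> None:
    """Execute a yolo CLI command and raise CalledProcessError if it exits non-zero."""
    subprocess.run(shlex.split(cmd), check=True)


if __name__ == "__main__":
    # Mirrors the style of the tests above; model weights and the image download on first use
    run("yolo predict model=yolo11n.pt source=https://ultralytics.com/images/bus.jpg imgsz=160")
```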
tests/test_exports.py CHANGED
@@ -20,7 +20,7 @@ from ultralytics.utils import (
  WINDOWS,
  checks,
  )
- from ultralytics.utils.torch_utils import TORCH_1_9, TORCH_1_13
+ from ultralytics.utils.torch_utils import TORCH_1_11, TORCH_1_13, TORCH_2_1
 
 
  def test_export_torchscript():
@@ -35,7 +35,7 @@ def test_export_onnx():
  YOLO(file)(SOURCE, imgsz=32) # exported model inference
 
 
- @pytest.mark.skipif(not TORCH_1_13, reason="OpenVINO requires torch>=1.13")
+ @pytest.mark.skipif(not TORCH_2_1, reason="OpenVINO requires torch>=2.1")
  def test_export_openvino():
  """Test YOLO export to OpenVINO format for model inference compatibility."""
  file = YOLO(MODEL).export(format="openvino", imgsz=32)
@@ -43,7 +43,7 @@ def test_export_openvino():
 
 
  @pytest.mark.slow
- @pytest.mark.skipif(not TORCH_1_13, reason="OpenVINO requires torch>=1.13")
+ @pytest.mark.skipif(not TORCH_2_1, reason="OpenVINO requires torch>=2.1")
  @pytest.mark.parametrize(
  "task, dynamic, int8, half, batch, nms",
  [ # generate all combinations except for exclusion cases
@@ -117,7 +117,7 @@ def test_export_torchscript_matrix(task, dynamic, int8, half, batch, nms):
 
  @pytest.mark.slow
  @pytest.mark.skipif(not MACOS, reason="CoreML inference only supported on macOS")
- @pytest.mark.skipif(not TORCH_1_9, reason="CoreML>=7.2 not supported with PyTorch<=1.8")
+ @pytest.mark.skipif(not TORCH_1_11, reason="CoreML export requires torch>=1.11")
  @pytest.mark.skipif(checks.IS_PYTHON_3_13, reason="CoreML not supported in Python 3.13")
  @pytest.mark.parametrize(
  "task, dynamic, int8, half, nms, batch",
@@ -169,7 +169,7 @@ def test_export_tflite_matrix(task, dynamic, int8, half, batch, nms):
  Path(file).unlink() # cleanup
 
 
- @pytest.mark.skipif(not TORCH_1_9, reason="CoreML>=7.2 not supported with PyTorch<=1.8")
+ @pytest.mark.skipif(not TORCH_1_11, reason="CoreML export requires torch>=1.11")
  @pytest.mark.skipif(WINDOWS, reason="CoreML not supported on Windows") # RuntimeError: BlobWriter not loaded
  @pytest.mark.skipif(LINUX and ARM64, reason="CoreML not supported on aarch64 Linux")
  @pytest.mark.skipif(checks.IS_PYTHON_3_13, reason="CoreML not supported in Python 3.13")
tests/test_python.py CHANGED
@@ -34,7 +34,7 @@ from ultralytics.utils import (
  is_github_action_running,
  )
  from ultralytics.utils.downloads import download
- from ultralytics.utils.torch_utils import TORCH_1_9
+ from ultralytics.utils.torch_utils import TORCH_1_11, TORCH_1_13
 
  IS_TMP_WRITEABLE = is_dir_writeable(TMP) # WARNING: must be run once tests start as TMP does not exist on tests/init
 
@@ -125,7 +125,9 @@ def test_predict_img(model_name):
  batch = [
  str(SOURCE), # filename
  Path(SOURCE), # Path
- "https://github.com/ultralytics/assets/releases/download/v0.0.0/zidane.jpg" if ONLINE else SOURCE, # URI
+ "https://github.com/ultralytics/assets/releases/download/v0.0.0/zidane.jpg?token=123"
+ if ONLINE
+ else SOURCE, # URI
  im, # OpenCV
  Image.open(SOURCE), # PIL
  np.zeros((320, 640, channels), dtype=np.uint8), # numpy
@@ -246,7 +248,7 @@ def test_all_model_yamls():
  """Test YOLO model creation for all available YAML configurations in the `cfg/models` directory."""
  for m in (ROOT / "cfg" / "models").rglob("*.yaml"):
  if "rtdetr" in m.name:
- if TORCH_1_9: # torch<=1.8 issue - TypeError: __init__() got an unexpected keyword argument 'batch_first'
+ if TORCH_1_11:
  _ = RTDETR(m.name)(SOURCE, imgsz=640) # must be 640
  else:
  YOLO(m.name)
@@ -634,7 +636,8 @@ def test_yolo_world():
  )
 
 
- @pytest.mark.skipif(checks.IS_PYTHON_3_12 or not TORCH_1_9, reason="YOLOE with CLIP is not supported in Python 3.12")
+ @pytest.mark.skipif(not TORCH_1_13, reason="YOLOE with CLIP requires torch>=1.13")
+ @pytest.mark.skipif(checks.IS_PYTHON_3_12, reason="YOLOE with CLIP is not supported in Python 3.12")
  @pytest.mark.skipif(
  checks.IS_PYTHON_3_8 and LINUX and ARM64,
  reason="YOLOE with CLIP is not supported in Python 3.8 and aarch64 Linux",
@@ -648,16 +651,12 @@ def test_yoloe():
  model.set_classes(names, model.get_text_pe(names))
  model(SOURCE, conf=0.01)
 
- import numpy as np
-
  from ultralytics import YOLOE
  from ultralytics.models.yolo.yoloe import YOLOEVPSegPredictor
 
  # visual-prompts
  visuals = dict(
- bboxes=np.array(
- [[221.52, 405.8, 344.98, 857.54], [120, 425, 160, 445]],
- ),
+ bboxes=np.array([[221.52, 405.8, 344.98, 857.54], [120, 425, 160, 445]]),
  cls=np.array([0, 1]),
  )
  model.predict(
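The reworked `test_yoloe` drops the local `numpy` import and passes the visual prompts as a single compact array. Outside the test suite, the same visual-prompt flow looks roughly like the sketch below (checkpoint name and image URL are illustrative; weights download on first use):

```python
import numpy as np

from ultralytics import YOLOE
from ultralytics.models.yolo.yoloe import YOLOEVPSegPredictor

model = YOLOE("yoloe-11s-seg.pt")  # illustrative checkpoint
visuals = dict(
    bboxes=np.array([[221.52, 405.8, 344.98, 857.54], [120, 425, 160, 445]]),  # one prompt box per row
    cls=np.array([0, 1]),  # class id assigned to each prompt box
)
results = model.predict(
    "https://ultralytics.com/images/bus.jpg",
    visual_prompts=visuals,
    predictor=YOLOEVPSegPredictor,
)
results[0].show()
```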
ultralytics/__init__.py CHANGED
@@ -1,6 +1,6 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
 
- __version__ = "8.3.202"
+ __version__ = "8.3.203"
 
  import importlib
  import os
ultralytics/data/build.py CHANGED
@@ -7,6 +7,7 @@ import random
  from collections.abc import Iterator
  from pathlib import Path
  from typing import Any
+ from urllib.parse import urlsplit
 
  import numpy as np
  import torch
@@ -247,8 +248,10 @@ def check_source(source):
  if isinstance(source, (str, int, Path)): # int for local usb camera
  source = str(source)
  source_lower = source.lower()
- is_file = source_lower.rpartition(".")[-1] in (IMG_FORMATS | VID_FORMATS)
  is_url = source_lower.startswith(("https://", "http://", "rtsp://", "rtmp://", "tcp://"))
+ is_file = (urlsplit(source_lower).path if is_url else source_lower).rpartition(".")[-1] in (
+ IMG_FORMATS | VID_FORMATS
+ )
  webcam = source.isnumeric() or source.endswith(".streams") or (is_url and not is_file)
  screenshot = source_lower == "screen"
  if is_url and is_file:
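The `check_source` change matters for URLs that carry a query string: the file extension is now taken from the URL path, so a link such as `.../zidane.jpg?token=123` is still recognized as an image. A standalone sketch of the idea using only the standard library (`IMG_FORMATS` below is an abbreviated stand-in for the real constant):

```python
from urllib.parse import urlsplit

IMG_FORMATS = {"bmp", "jpeg", "jpg", "png", "tif", "tiff", "webp"}  # abbreviated, for illustration


def looks_like_image(source: str) -> bool:
    """Return True if a path or URL ends in a known image suffix, ignoring any query string."""
    s = source.lower()
    is_url = s.startswith(("https://", "http://"))
    stem = urlsplit(s).path if is_url else s  # drops "?token=123" and "#fragment" before the check
    return stem.rpartition(".")[-1] in IMG_FORMATS


print(looks_like_image("https://example.com/zidane.jpg?token=123"))  # True
print(looks_like_image("rtsp://camera/stream"))  # False
```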
ultralytics/engine/exporter.py CHANGED
@@ -112,7 +112,7 @@ from ultralytics.utils.metrics import batch_probiou
  from ultralytics.utils.nms import TorchNMS
  from ultralytics.utils.ops import Profile
  from ultralytics.utils.patches import arange_patch
- from ultralytics.utils.torch_utils import TORCH_1_13, get_latest_opset, select_device
+ from ultralytics.utils.torch_utils import TORCH_1_11, TORCH_1_13, TORCH_2_1, select_device
 
 
  def export_formats():
@@ -152,6 +152,31 @@ def export_formats():
  return dict(zip(["Format", "Argument", "Suffix", "CPU", "GPU", "Arguments"], zip(*x)))
 
 
+ def best_onnx_opset(onnx) -> int:
+ """Return max ONNX opset for this torch version with ONNX fallback."""
+ if TORCH_1_13: # not supported by torch<1.13
+ opset = torch.onnx.utils._constants.ONNX_MAX_OPSET - 1 # use second-latest version for safety
+ else:
+ opset = {
+ "1.8": 12,
+ "1.9": 12,
+ "1.10": 13,
+ "1.11": 14,
+ "1.12": 15,
+ "1.13": 17,
+ "2.0": 18,
+ "2.1": 19,
+ "2.2": 19,
+ "2.3": 19,
+ "2.4": 20,
+ "2.5": 20,
+ "2.6": 20,
+ "2.7": 20,
+ "2.8": 23,
+ }.get(".".join(TORCH_VERSION.split(".")[:2]), 12)
+ return min(opset, onnx.defs.onnx_opset_version())
+
+
  def validate_args(format, passed_args, valid_args):
  """
  Validate arguments based on the export format.
@@ -586,8 +611,8 @@ class Exporter:
  check_requirements(requirements)
  import onnx # noqa
 
- opset_version = self.args.opset or get_latest_opset()
- LOGGER.info(f"\n{prefix} starting export with onnx {onnx.__version__} opset {opset_version}...")
+ opset = self.args.opset or best_onnx_opset(onnx)
+ LOGGER.info(f"\n{prefix} starting export with onnx {onnx.__version__} opset {opset}...")
  f = str(self.file.with_suffix(".onnx"))
  output_names = ["output0", "output1"] if isinstance(self.model, SegmentationModel) else ["output0"]
  dynamic = self.args.dynamic
@@ -601,14 +626,14 @@
  if self.args.nms: # only batch size is dynamic with NMS
  dynamic["output0"].pop(2)
  if self.args.nms and self.model.task == "obb":
- self.args.opset = opset_version # for NMSModel
+ self.args.opset = opset # for NMSModel
 
  with arange_patch(self.args):
  torch2onnx(
  NMSModel(self.model, self.args) if self.args.nms else self.model,
  self.im,
  f,
- opset=opset_version,
+ opset=opset,
  input_names=["images"],
  output_names=output_names,
  dynamic=dynamic or None,
@@ -633,6 +658,11 @@
  meta = model_onnx.metadata_props.add()
  meta.key, meta.value = k, str(v)
 
+ # IR version
+ if getattr(model_onnx, "ir_version", 0) > 10:
+ LOGGER.info(f"{prefix} limiting IR version {model_onnx.ir_version} to 10 for ONNXRuntime compatibility...")
+ model_onnx.ir_version = 10
+
  onnx.save(model_onnx, f)
  return f
 
@@ -644,7 +674,7 @@
  import openvino as ov
 
  LOGGER.info(f"\n{prefix} starting export with openvino {ov.__version__}...")
- assert TORCH_1_13, f"OpenVINO export requires torch>=1.13.0 but torch=={TORCH_VERSION} is installed"
+ assert TORCH_2_1, f"OpenVINO export requires torch>=2.1 but torch=={TORCH_VERSION} is installed"
  ov_model = ov.convert_model(
  NMSModel(self.model, self.args) if self.args.nms else self.model,
  input=None if self.args.dynamic else [self.im.shape],
@@ -837,6 +867,7 @@
 
  LOGGER.info(f"\n{prefix} starting export with coremltools {ct.__version__}...")
  assert not WINDOWS, "CoreML export is not supported on Windows, please run on macOS or Linux."
+ assert TORCH_1_11, "CoreML export requires torch>=1.11"
  assert self.args.batch == 1, "CoreML batch sizes > 1 are not supported. Please retry at 'batch=1'."
  f = self.file.with_suffix(".mlmodel" if mlmodel else ".mlpackage")
  if f.is_dir():
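`best_onnx_opset` replaces the removed `get_latest_opset` helper: it maps the running torch release to an opset and then caps the result at what the installed `onnx` package can actually serialize. A simplified standalone sketch of that capping logic (abbreviated version table, not the exporter's exact code):

```python
import onnx
import torch


def pick_opset() -> int:
    """Choose an ONNX opset from the torch version, capped by the installed onnx library."""
    torch_mm = ".".join(torch.__version__.split("+")[0].split(".")[:2])  # e.g. "2.4"
    by_torch = {"1.13": 17, "2.0": 18, "2.1": 19, "2.4": 20, "2.8": 23}  # abbreviated mapping
    opset = by_torch.get(torch_mm, 12)
    return min(opset, onnx.defs.onnx_opset_version())  # never exceed what onnx itself supports


print(pick_opset())
```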
ultralytics/engine/trainer.py CHANGED
@@ -415,9 +415,12 @@ class BaseTrainer:
  # Forward
  with autocast(self.amp):
  batch = self.preprocess_batch(batch)
- # decouple inference and loss calculations for torch.compile convenience
- preds = self.model(batch["img"])
- loss, self.loss_items = unwrap_model(self.model).loss(batch, preds)
+ if self.args.compile:
+ # Decouple inference and loss calculations for improved compile performance
+ preds = self.model(batch["img"])
+ loss, self.loss_items = unwrap_model(self.model).loss(batch, preds)
+ else:
+ loss, self.loss_items = self.model(batch)
  self.loss = loss.sum()
  if RANK != -1:
  self.loss *= self.world_size
@@ -581,6 +584,7 @@ class BaseTrainer:
  "ema": deepcopy(unwrap_model(self.ema.ema)).half(),
  "updates": self.ema.updates,
  "optimizer": convert_optimizer_state_dict_to_fp16(deepcopy(self.optimizer.state_dict())),
+ "scaler": self.scaler.state_dict(),
  "train_args": vars(self.args), # save as dict
  "train_metrics": {**self.metrics, **{"fitness": self.fitness}},
  "train_results": self.read_results_csv(),
@@ -809,9 +813,11 @@ class BaseTrainer:
  return
  best_fitness = 0.0
  start_epoch = ckpt.get("epoch", -1) + 1
- if ckpt.get("optimizer", None) is not None:
+ if ckpt.get("optimizer") is not None:
  self.optimizer.load_state_dict(ckpt["optimizer"]) # optimizer
  best_fitness = ckpt["best_fitness"]
+ if ckpt.get("scaler") is not None:
+ self.scaler.load_state_dict(ckpt["scaler"])
  if self.ema and ckpt.get("ema"):
  self.ema.ema.load_state_dict(ckpt["ema"].float().state_dict()) # EMA
  self.ema.updates = ckpt["updates"]
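Checkpoints now carry the AMP `GradScaler` state and `resume_training` restores it, so loss scaling no longer restarts from its defaults after a resume. A minimal sketch of that round trip, assuming torch>=2.3 for the `torch.amp.GradScaler` constructor:

```python
import torch

scaler = torch.amp.GradScaler("cuda", enabled=torch.cuda.is_available())
# ... train for a while, then save the scaler next to the optimizer/EMA state ...
torch.save({"epoch": 10, "scaler": scaler.state_dict()}, "last.pt")

# On resume, restore the scale factor and growth tracker instead of starting fresh
ckpt = torch.load("last.pt")
new_scaler = torch.amp.GradScaler("cuda", enabled=torch.cuda.is_available())
if ckpt.get("scaler") is not None:
    new_scaler.load_state_dict(ckpt["scaler"])
```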
ultralytics/engine/tuner.py CHANGED
@@ -435,7 +435,7 @@ class Tuner:
  best_metrics = {k: round(v, 5) for k, v in metrics.items()}
  for ckpt in weights_dir.glob("*.pt"):
  shutil.copy2(ckpt, self.tune_dir / "weights")
- elif cleanup:
+ elif cleanup and best_save_dir:
  shutil.rmtree(best_save_dir, ignore_errors=True) # remove iteration dirs to reduce storage space
 
  # Plot tune results
ultralytics/models/fastsam/predict.py CHANGED
@@ -7,6 +7,7 @@ from ultralytics.models.yolo.segment import SegmentationPredictor
  from ultralytics.utils import DEFAULT_CFG, checks
  from ultralytics.utils.metrics import box_iou
  from ultralytics.utils.ops import scale_masks
+ from ultralytics.utils.torch_utils import TORCH_1_10
 
  from .utils import adjust_bboxes_to_image_border
 
@@ -135,7 +136,7 @@ class FastSAMPredictor(SegmentationPredictor):
  crop_ims, filter_idx = [], []
  for i, b in enumerate(result.boxes.xyxy.tolist()):
  x1, y1, x2, y2 = (int(x) for x in b)
- if masks[i].sum() <= 100:
+ if (masks[i].sum() if TORCH_1_10 else masks[i].sum(0).sum()) <= 100: # torch 1.9 bug workaround
  filter_idx.append(i)
  continue
  crop_ims.append(Image.fromarray(result.orig_img[y1:y2, x1:x2, ::-1]))
ultralytics/models/rtdetr/model.py CHANGED
@@ -11,6 +11,7 @@ References:
 
  from ultralytics.engine.model import Model
  from ultralytics.nn.tasks import RTDETRDetectionModel
+ from ultralytics.utils.torch_utils import TORCH_1_11
 
  from .predict import RTDETRPredictor
  from .train import RTDETRTrainer
@@ -44,6 +45,7 @@ class RTDETR(Model):
  Args:
  model (str): Path to the pre-trained model. Supports .pt, .yaml, and .yml formats.
  """
+ assert TORCH_1_11, "RTDETR requires torch>=1.11"
  super().__init__(model=model, task="detect")
 
  @property
ultralytics/models/yolo/world/train.py CHANGED
@@ -64,6 +64,7 @@ class WorldTrainer(DetectionTrainer):
  """
  if overrides is None:
  overrides = {}
+ assert not overrides.get("compile"), f"Training with 'model={overrides['model']}' requires 'compile=False'"
  super().__init__(cfg, overrides, _callbacks)
  self.text_embeddings = None
 
ultralytics/models/yolo/yoloe/train.py CHANGED
@@ -46,6 +46,7 @@ class YOLOETrainer(DetectionTrainer):
  """
  if overrides is None:
  overrides = {}
+ assert not overrides.get("compile"), f"Training with 'model={overrides['model']}' requires 'compile=False'"
  overrides["overlap_mask"] = False
  super().__init__(cfg, overrides, _callbacks)
 
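Both `WorldTrainer` and `YOLOETrainer` now refuse to start when `compile=True` is passed in the training overrides. In practice that just means leaving `compile` at its default of False (or passing it explicitly), e.g. this hedged YOLO-World training sketch (dataset and weights download on first use):

```python
from ultralytics import YOLO

# YOLO-World training goes through WorldTrainer, which now asserts compile=False
model = YOLO("yolov8s-world.pt")
model.train(data="coco8.yaml", epochs=1, imgsz=160, compile=False)
```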
ultralytics/nn/modules/head.py CHANGED
@@ -12,8 +12,8 @@ import torch.nn.functional as F
  from torch.nn.init import constant_, xavier_uniform_
 
  from ultralytics.utils import NOT_MACOS14
- from ultralytics.utils.tal import TORCH_1_10, dist2bbox, dist2rbox, make_anchors
- from ultralytics.utils.torch_utils import fuse_conv_and_bn, smart_inference_mode
+ from ultralytics.utils.tal import dist2bbox, dist2rbox, make_anchors
+ from ultralytics.utils.torch_utils import TORCH_1_11, fuse_conv_and_bn, smart_inference_mode
 
  from .block import DFL, SAVPE, BNContrastiveHead, ContrastiveHead, Proto, Residual, SwiGLUFFN
  from .conv import Conv, DWConv
@@ -1052,7 +1052,7 @@ class RTDETRDecoder(nn.Module):
  for i, (h, w) in enumerate(shapes):
  sy = torch.arange(end=h, dtype=dtype, device=device)
  sx = torch.arange(end=w, dtype=dtype, device=device)
- grid_y, grid_x = torch.meshgrid(sy, sx, indexing="ij") if TORCH_1_10 else torch.meshgrid(sy, sx)
+ grid_y, grid_x = torch.meshgrid(sy, sx, indexing="ij") if TORCH_1_11 else torch.meshgrid(sy, sx)
  grid_xy = torch.stack([grid_x, grid_y], -1) # (h, w, 2)
 
  valid_WH = torch.tensor([w, h], dtype=dtype, device=device)
ultralytics/nn/modules/transformer.py CHANGED
@@ -10,6 +10,8 @@ import torch.nn as nn
  import torch.nn.functional as F
  from torch.nn.init import constant_, xavier_uniform_
 
+ from ultralytics.utils.torch_utils import TORCH_1_11
+
  from .conv import Conv
  from .utils import _get_clones, inverse_sigmoid, multi_scale_deformable_attn_pytorch
 
@@ -236,7 +238,7 @@ class AIFI(TransformerEncoderLayer):
  assert embed_dim % 4 == 0, "Embed dimension must be divisible by 4 for 2D sin-cos position embedding"
  grid_w = torch.arange(w, dtype=torch.float32)
  grid_h = torch.arange(h, dtype=torch.float32)
- grid_w, grid_h = torch.meshgrid(grid_w, grid_h, indexing="ij")
+ grid_w, grid_h = torch.meshgrid(grid_w, grid_h, indexing="ij") if TORCH_1_11 else torch.meshgrid(grid_w, grid_h)
  pos_dim = embed_dim // 4
  omega = torch.arange(pos_dim, dtype=torch.float32) / pos_dim
  omega = 1.0 / (temperature**omega)
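`AIFI` now shares the `TORCH_1_11` guard used elsewhere for `torch.meshgrid`: newer torch expects an explicit `indexing=` argument, while the positional call on older releases already behaves as "ij". A small sketch of the guarded call, assuming this 8.3.203 wheel is installed so the flag can be imported:

```python
import torch

from ultralytics.utils.torch_utils import TORCH_1_11  # version flag added in this release

sy = torch.arange(4, dtype=torch.float32)
sx = torch.arange(6, dtype=torch.float32)
# Explicit indexing="ij" on torch>=1.11; the positional form on older torch defaults to "ij"
grid_y, grid_x = torch.meshgrid(sy, sx, indexing="ij") if TORCH_1_11 else torch.meshgrid(sy, sx)
print(grid_y.shape, grid_x.shape)  # torch.Size([4, 6]) torch.Size([4, 6])
```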
ultralytics/utils/checks.py CHANGED
@@ -669,6 +669,9 @@ def check_yolo(verbose=True, device=""):
  else:
  s = ""
 
+ if GIT.is_repo:
+ check_multiple_install() # check conflicting installation if using local clone
+
  select_device(device=device, newline=False)
  LOGGER.info(f"Setup complete ✅ {s}")
 
@@ -807,6 +810,30 @@ def check_amp(model):
  return True
 
 
+ def check_multiple_install():
+ """Check if there are multiple Ultralytics installations."""
+ import sys
+
+ try:
+ result = subprocess.run([sys.executable, "-m", "pip", "show", "ultralytics"], capture_output=True, text=True)
+ install_msg = (
+ f"Install your local copy in editable mode with 'pip install -e {ROOT.parent}' to avoid "
+ "issues. See https://docs.ultralytics.com/quickstart/"
+ )
+ if result.returncode != 0:
+ if "not found" in result.stderr.lower(): # Package not pip-installed but locally imported
+ LOGGER.warning(f"Ultralytics not found via pip but importing from: {ROOT}. {install_msg}")
+ return
+ yolo_path = (Path(re.findall(r"location:\s+(.+)", result.stdout, flags=re.I)[-1]) / "ultralytics").resolve()
+ if not yolo_path.samefile(ROOT.resolve()):
+ LOGGER.warning(
+ f"Multiple Ultralytics installations detected. The `yolo` command uses: {yolo_path}, "
+ f"but current session imports from: {ROOT}. This may cause version conflicts. {install_msg}"
+ )
+ except Exception:
+ return
+
+
  def print_args(args: dict | None = None, show_file=True, show_func=False):
  """
  Print function arguments (optional args dict).
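`check_multiple_install` compares where pip thinks `ultralytics` lives against the directory the current interpreter actually imports from, and warns when the two differ (a common pitfall when a git clone sits next to a pip-installed copy). A rough standalone version of the same comparison:

```python
import re
import subprocess
import sys
from pathlib import Path

import ultralytics  # the copy this interpreter actually imports

imported = Path(ultralytics.__file__).resolve().parent
result = subprocess.run([sys.executable, "-m", "pip", "show", "ultralytics"], capture_output=True, text=True)
match = re.search(r"^Location:\s*(.+)$", result.stdout, flags=re.M)
if match:
    installed = (Path(match.group(1).strip()) / "ultralytics").resolve()
    if installed != imported:
        print(f"pip-installed copy: {installed}")
        print(f"imported copy:      {imported}  <- consider 'pip install -e .' from the clone")
```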
ultralytics/utils/tal.py CHANGED
@@ -3,12 +3,10 @@
  import torch
  import torch.nn as nn
 
- from . import LOGGER, TORCH_VERSION
- from .checks import check_version
+ from . import LOGGER
  from .metrics import bbox_iou, probiou
  from .ops import xywhr2xyxyxyxy
-
- TORCH_1_10 = check_version(TORCH_VERSION, "1.10.0")
+ from .torch_utils import TORCH_1_11
 
 
  class TaskAlignedAssigner(nn.Module):
@@ -373,7 +371,7 @@ def make_anchors(feats, strides, grid_cell_offset=0.5):
  h, w = feats[i].shape[2:] if isinstance(feats, list) else (int(feats[i][0]), int(feats[i][1]))
  sx = torch.arange(end=w, device=device, dtype=dtype) + grid_cell_offset # shift x
  sy = torch.arange(end=h, device=device, dtype=dtype) + grid_cell_offset # shift y
- sy, sx = torch.meshgrid(sy, sx, indexing="ij") if TORCH_1_10 else torch.meshgrid(sy, sx)
+ sy, sx = torch.meshgrid(sy, sx, indexing="ij") if TORCH_1_11 else torch.meshgrid(sy, sx)
  anchor_points.append(torch.stack((sx, sy), -1).view(-1, 2))
  stride_tensor.append(torch.full((h * w, 1), stride, dtype=dtype, device=device))
  return torch.cat(anchor_points), torch.cat(stride_tensor)
ultralytics/utils/torch_utils.py CHANGED
@@ -38,8 +38,11 @@ from ultralytics.utils.patches import torch_load
 
  # Version checks (all default to version>=min_version)
  TORCH_1_9 = check_version(TORCH_VERSION, "1.9.0")
+ TORCH_1_10 = check_version(TORCH_VERSION, "1.10.0")
+ TORCH_1_11 = check_version(TORCH_VERSION, "1.11.0")
  TORCH_1_13 = check_version(TORCH_VERSION, "1.13.0")
  TORCH_2_0 = check_version(TORCH_VERSION, "2.0.0")
+ TORCH_2_1 = check_version(TORCH_VERSION, "2.1.0")
  TORCH_2_4 = check_version(TORCH_VERSION, "2.4.0")
  TORCHVISION_0_10 = check_version(TORCHVISION_VERSION, "0.10.0")
  TORCHVISION_0_11 = check_version(TORCHVISION_VERSION, "0.11.0")
@@ -534,21 +537,6 @@ def copy_attr(a, b, include=(), exclude=()):
  setattr(a, k, v)
 
 
- def get_latest_opset():
- """
- Return the second-most recent ONNX opset version supported by this version of PyTorch, adjusted for maturity.
-
- Returns:
- (int): The ONNX opset version.
- """
- if TORCH_1_13:
- # If the PyTorch>=1.13, dynamically compute the latest opset minus one using 'symbolic_opset'
- return max(int(k[14:]) for k in vars(torch.onnx) if "symbolic_opset" in k) - 1
- # Otherwise for PyTorch<=1.12 return the corresponding predefined opset
- version = torch.onnx.producer_version.rsplit(".", 1)[0] # i.e. '2.3'
- return {"1.12": 15, "1.11": 14, "1.10": 13, "1.9": 12, "1.8": 12}.get(version, 12)
-
-
  def intersect_dicts(da, db, exclude=()):
  """
  Return a dictionary of intersecting keys with matching shapes, excluding 'exclude' keys, using da values.
ultralytics-8.3.202.dist-info/METADATA → ultralytics-8.3.203.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ultralytics
- Version: 8.3.202
+ Version: 8.3.203
  Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
  Maintainer-email: Ultralytics <hello@ultralytics.com>
ultralytics-8.3.202.dist-info/RECORD → ultralytics-8.3.203.dist-info/RECORD CHANGED
@@ -1,13 +1,13 @@
  tests/__init__.py,sha256=b4KP5_q-2IO8Br8YHOSLYnn7IwZS81l_vfEF2YPa2lM,894
  tests/conftest.py,sha256=LXtQJcFNWPGuzauTGkiXgsvVC3llJKfg22WcmhRzuQc,2593
- tests/test_cli.py,sha256=EMf5gTAopOnIz8VvzaM-Qb044o7D0flnUHYQ-2ffOM4,5670
+ tests/test_cli.py,sha256=IX-ddXRCb0QSW1KuZBdvciyWpuzCAPMy2Tus4OD6Yfo,5453
  tests/test_cuda.py,sha256=3eiigQIWEkqLsIznlqAMrAi3Dhd_N54Ojtm5LCQELyo,8022
  tests/test_engine.py,sha256=8W4_D48ZBUp-DsUlRYxHTXzougycY8yggvpbVwQDLPg,5025
- tests/test_exports.py,sha256=dWuroSyqXnrc0lE-RNTf7pZoXXXEkOs31u7nhOiEHS0,10994
+ tests/test_exports.py,sha256=Lc9Qbeth8cse0W5lu3JppHMFl2RacXI1qlIewrlYHlk,10986
  tests/test_integrations.py,sha256=kl_AKmE_Qs1GB0_91iVwbzNxofm_hFTt0zzU6JF-pg4,6323
- tests/test_python.py,sha256=2V23f2-JQsO-K4p1kj0IkCRxHykGwgd0edKJzRsBgdI,27911
+ tests/test_python.py,sha256=KkBDNWqSUGt7qf04ef7q2xUYrqMvgOpbtwwlQWloJMY,27877
  tests/test_solutions.py,sha256=6wJ9-lhyWSAm7zaR4D9L_DrUA3iJU1NgqmbQO6PIuvo,13211
- ultralytics/__init__.py,sha256=G1mm6n1LLsHdXaTS_Bpe-yd2AEKD1QN3HX68A1OCpt4,1120
+ ultralytics/__init__.py,sha256=n9u727aW14rdduCGPhF1H1txDO1nQrjrxQSkykAuBCU,1120
  ultralytics/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
  ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
  ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
@@ -109,7 +109,7 @@ ultralytics/data/__init__.py,sha256=nAXaL1puCc7z_NjzQNlJnhbVhT9Fla2u7Dsqo7q1dAc,
  ultralytics/data/annotator.py,sha256=f15TCDEM8SuuzHiFB8oyhTy9vfywKmPTLSPAgsZQP9I,2990
  ultralytics/data/augment.py,sha256=7NsRCYu_uM6KkpU0F03NC9Ra_GQVGp2dRO1RksrrU38,132897
  ultralytics/data/base.py,sha256=gWoGFifyNe1TCwtGdGp5jzKOQ9sh4b-XrfyN0PPvRaY,19661
- ultralytics/data/build.py,sha256=Bhu8E-FNSkTbz6YpNXeUBmQtN91ZtZxOCUiKYXgzV-c,11778
+ ultralytics/data/build.py,sha256=cdhD1Z4Gv9KLi5n9OchDRBH8rfMQ1NyDja_D7DmAS00,11879
  ultralytics/data/converter.py,sha256=N1YFD0mG7uwL12wMcuVtF2zbISBIzTsGiy1QioDTDGs,32049
  ultralytics/data/dataset.py,sha256=GL6J_fvluaF2Ck1in3W5q3Xm7lRcUd6Amgd_uu6r_FM,36772
  ultralytics/data/loaders.py,sha256=sfQ0C86uBg9QQbN3aU0W8FIjGQmMdJTQAMK4DA1bjk8,31748
@@ -121,12 +121,12 @@ ultralytics/data/scripts/get_coco.sh,sha256=UuJpJeo3qQpTHVINeOpmP0NYmg8PhEFE3A8J
  ultralytics/data/scripts/get_coco128.sh,sha256=qmRQl_hOKrsdHrTrnyQuFIH01oDz3lfaz138OgGfLt8,650
  ultralytics/data/scripts/get_imagenet.sh,sha256=hr42H16bM47iT27rgS7MpEo-GeOZAYUQXgr0B2cwn48,1705
  ultralytics/engine/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
- ultralytics/engine/exporter.py,sha256=zq5b9m_SlRDwXor6Iq1qKFKkRphIAbJBVvGsRmMUCGA,68280
+ ultralytics/engine/exporter.py,sha256=G7DIQtQfdvgWbCMVKQQmluWZ_LQP4ig2kvMgDM1c7Ds,69304
  ultralytics/engine/model.py,sha256=iwwaL2NR5NSwQ7R3juHzS3ds9W-CfhC_CjUcwMvcgsk,53426
  ultralytics/engine/predictor.py,sha256=4lfw2RbBDE7939011FcSCuznscrcnMuabZtc8GXaKO4,22735
  ultralytics/engine/results.py,sha256=uQ_tgvdxKAg28pRgb5WCHiqx9Ktu7wYiVbwZy_IJ5bo,71499
- ultralytics/engine/trainer.py,sha256=25SIKM5Wi1XbpNz4SckmsfzbF60V-T4wKKa29FhXX1U,41035
- ultralytics/engine/tuner.py,sha256=XwWu6gEERpialS_uqswSSI3HjH8Hb9-71TKtu_PGiCU,21656
+ ultralytics/engine/trainer.py,sha256=lw3gAXs9JVp4YrEdzfz04UIYB4n_FRvYn6lSF5uwh3Y,41329
+ ultralytics/engine/tuner.py,sha256=Cq_iyP3Ur2AbG7sR-Z0p1_szZ34UH0AY0bCwetglqRA,21674
  ultralytics/engine/validator.py,sha256=7tADPOXRZz0Yi7F-Z5SxcUnwytaa2MfbtuSdO8pp_l4,16966
  ultralytics/hub/__init__.py,sha256=xCF02lzlPKbdmGfO3NxLuXl5Kb0MaBZp_-fAWDHZ8zw,6698
  ultralytics/hub/auth.py,sha256=RIwZDWfW6vS2yGpZKR0xVl0-38itJYEFtmqY_M70bl8,6304
@@ -136,7 +136,7 @@ ultralytics/hub/google/__init__.py,sha256=8o3RorFafO_DzlzImXnzNQXtyPM1k-CQ8tsWSf
  ultralytics/models/__init__.py,sha256=DqQFFYJ4IQlqIDb61H1HzcnZU7SuHN-43bw94-l-YAQ,309
  ultralytics/models/fastsam/__init__.py,sha256=HGJ8EKlBAsdF-e2aIwQLjSDAFI_r0yHR0A1gzrp4vqE,231
  ultralytics/models/fastsam/model.py,sha256=vIdl536LUrefjqMFEJ-9UyK4Ta6p2ki2G_gn2DZ9X_Y,3438
- ultralytics/models/fastsam/predict.py,sha256=NtvOTkkb5D790qm0iCFbXPeS7kmSvgiaCcJnsjHufes,8962
+ ultralytics/models/fastsam/predict.py,sha256=_qTgUNL8L0XQBvpIBZR_GII0Tt1-cjpu11JcbP-8nbM,9086
  ultralytics/models/fastsam/utils.py,sha256=yuCXB4CVjRx8lDf61DP8B6qMx7TVf7AynQvdWREeFco,884
  ultralytics/models/fastsam/val.py,sha256=oLxB8vBKTfiT7eBbTzvpqq_xNSvDOjGdP1J7egHGsCA,2041
  ultralytics/models/nas/__init__.py,sha256=wybeHZuAXMNeXMjKTbK55FZmXJkA4K9IozDeFM9OB-s,207
@@ -144,7 +144,7 @@ ultralytics/models/nas/model.py,sha256=Z2Mq4uiI9Mk2qYLFha5j3efpHVuJ5ySfpdAu9kFGP
  ultralytics/models/nas/predict.py,sha256=J4UT7nwi_h63lJ3a_gYac-Ws8wFYingZINxMqSoaX5E,2706
  ultralytics/models/nas/val.py,sha256=QUTE3zuhJLVqmDGd2n7iSSk7X6jKZCRxufFkBbyxYYo,1548
  ultralytics/models/rtdetr/__init__.py,sha256=_jEHmOjI_QP_nT3XJXLgYHQ6bXG4EL8Gnvn1y_eev1g,225
- ultralytics/models/rtdetr/model.py,sha256=e2u6kQEYawRXGGO6HbFDE1uyHfsIqvKk4IpVjjYN41k,2182
+ ultralytics/models/rtdetr/model.py,sha256=Pq9QDgaZetDnjxdYSoomj2s6vOGSdpsqVfyN5j0GUmc,2292
  ultralytics/models/rtdetr/predict.py,sha256=43-gGCHEH7UQQ6H1oXdlDlrM39esnp-YEhqCvZOwtOM,4279
  ultralytics/models/rtdetr/train.py,sha256=SNntxGHXatbNqn1yna5_dDQiR_ciDK6o_4S7JIHU7EY,3765
  ultralytics/models/rtdetr/val.py,sha256=l26CzpcYHYC0sQ--rKUFBCYl73nsgAGOj1U3xScNzFs,8918
@@ -188,11 +188,11 @@ ultralytics/models/yolo/segment/predict.py,sha256=HePes5rQ9v3iTCpn3vrIee0SsAsJuJ
  ultralytics/models/yolo/segment/train.py,sha256=5aPK5FDHLzbXb3R5TCpsAr1O6-8rtupOIoDokY8bSDs,3032
  ultralytics/models/yolo/segment/val.py,sha256=fJLDJpK1RZgeMvmtf47BjHhZ9lzX_4QfUuBzGXZqIhA,11289
  ultralytics/models/yolo/world/__init__.py,sha256=nlh8I6t8hMGz_vZg8QSlsUW1R-2eKvn9CGUoPPQEGhA,131
- ultralytics/models/yolo/world/train.py,sha256=zVPtVoBedberGkth3tPuIH665HjGNJvTMLw_wLZQM84,7870
+ ultralytics/models/yolo/world/train.py,sha256=RRvzSHUnQLaYRaUOjbuvnoL1K3je8-xS3gSeJybfHOY,7986
  ultralytics/models/yolo/world/train_world.py,sha256=9p9YIckrATaJjGOrpmuC8MbZX9qdoCPCEV9EGZ0sExg,9553
  ultralytics/models/yolo/yoloe/__init__.py,sha256=6SLytdJtwu37qewf7CobG7C7Wl1m-xtNdvCXEasfPDE,760
  ultralytics/models/yolo/yoloe/predict.py,sha256=pcbAUbosr1Xc436MfQi6ah3MQ6kkPzjOcltmdA3VMDE,7124
- ultralytics/models/yolo/yoloe/train.py,sha256=jcXqGm8CReOCVMFLk-1bNe0Aw5PWaaQa8xBWxtrt5TY,13571
+ ultralytics/models/yolo/yoloe/train.py,sha256=jpCSXYZ8WJBzGvMH5oW2DdeMWvTYQhwPwD3papn__9w,13687
  ultralytics/models/yolo/yoloe/train_seg.py,sha256=aCV7M8oQOvODFnU4piZdJh3tIrBJYAzZfRVRx1vRgxo,4956
  ultralytics/models/yolo/yoloe/val.py,sha256=5Gd9EoFH0FmKKvWXBl4J7gBe9DVxIczN-s3ceHwdUDo,9458
  ultralytics/nn/__init__.py,sha256=PJgOn2phQTTBR2P3s_JWvGeGXQpvw1znsumKow4tCuE,545
@@ -203,8 +203,8 @@ ultralytics/nn/modules/__init__.py,sha256=BPMbEm1daI7Tuds3zph2_afAX7Gq1uAqK8BfiC
  ultralytics/nn/modules/activation.py,sha256=75JcIMH2Cu9GTC2Uf55r_5YLpxcrXQDaVoeGQ0hlUAU,2233
  ultralytics/nn/modules/block.py,sha256=-5RfsA_ljekL8_bQPGupSn9dVcZ8V_lVsOGlhzIW1kg,70622
  ultralytics/nn/modules/conv.py,sha256=U6P1ZuzQmIf09noKwp7syuWn-M98Tly2wMWOsDT3kOI,21457
- ultralytics/nn/modules/head.py,sha256=RpeAR7U8S5sqegmOk76Ch2a_jH4lnsHTZWft3CHbICA,53308
- ultralytics/nn/modules/transformer.py,sha256=l6NuuFF7j_bogcNULHBBdj5l6sf7MwiVEGz8XcRyTUM,31366
+ ultralytics/nn/modules/head.py,sha256=FWpgbS8d1My62pyyQH89nbFgHhHIZ-sgSp3YyRet_oY,53308
+ ultralytics/nn/modules/transformer.py,sha256=AkWqDGPtk5AgEaAZgP3TObu1nDr4_B_2fzOr3xqq6EY,31470
  ultralytics/nn/modules/utils.py,sha256=rn8yTObZGkQoqVzjbZWLaHiytppG4ffjMME4Lw60glM,6092
  ultralytics/solutions/__init__.py,sha256=ZoeAQavTLp8aClnhZ9tbl6lxy86GxofyGvZWTx2aWkI,1209
  ultralytics/solutions/ai_gym.py,sha256=VHUYkq2AT5Zaee-Px9abvN97thhomz7VDqg0HNZLKLI,5217
@@ -240,7 +240,7 @@ ultralytics/utils/__init__.py,sha256=whSIuj-0lV0SAp4YjOeBJZ2emP1Qa8pqLnrhRiwl2Qs
  ultralytics/utils/autobatch.py,sha256=i6KYLLSItKP1Q2IUlTPHrZhjcxl7UOjs0Seb8bF8pvM,5124
  ultralytics/utils/autodevice.py,sha256=d9yq6eEn05fdfzfpxeSECd0YEO61er5f7T-0kjLdofg,8843
  ultralytics/utils/benchmarks.py,sha256=wBsDrwtc6NRM9rIDmqeGQ_9yxOTetnchXXHwZSUhp18,31444
- ultralytics/utils/checks.py,sha256=H4WvEOjaxrsG0pVIpJASGXs0m3yPFUcNZRwZjnSgowQ,34523
+ ultralytics/utils/checks.py,sha256=EaZh6gmv8vk9dnmSLNusKBHMh-ZSD4NxA3wXVjVMa_o,35798
  ultralytics/utils/cpu.py,sha256=OPlVxROWhQp-kEa9EkeNRKRQ-jz0KwySu5a-h91JZjk,3634
  ultralytics/utils/dist.py,sha256=5xQhWK0OLORvseAL08UmG1LYdkiDVLquxmaGSnqiSqo,4151
  ultralytics/utils/downloads.py,sha256=JIlHfUg-qna5aOHRJupH7d5zob2qGZtRrs86Cp3zOJs,23029
@@ -256,8 +256,8 @@ ultralytics/utils/nms.py,sha256=AVOmPuUTEJqmq2J6rvjq-nHNxYIyabgzHdc41siyA0w,1416
  ultralytics/utils/ops.py,sha256=PW3fgw1d18CA2ZNQZVJqUy054cJ_9tIcxd1XnA0FPgU,26905
  ultralytics/utils/patches.py,sha256=0-2G4jXCIPnMonlft-cPcjfFcOXQS6ODwUDNUwanfg4,6541
  ultralytics/utils/plotting.py,sha256=XWXZi02smBeFji3BSkMZNNNssXzO-dIxFaD15_N1f-4,47221
- ultralytics/utils/tal.py,sha256=LrziY_ZHz4wln3oOnqAzgyPaXKoup17Sa103BpuaQFU,20935
- ultralytics/utils/torch_utils.py,sha256=n-CMgLfQsg-SNF281nNHJm_kBdxPIrVr7xrI6gneL20,41771
+ ultralytics/utils/tal.py,sha256=7KQYNyetfx18CNc_bvNG7BDb44CIU3DEu4qziVVvNAE,20869
+ ultralytics/utils/torch_utils.py,sha256=Cr_PJSjIlAbIkbcz0nojsAqc5m4xpQVBafgRcKFkcow,41271
  ultralytics/utils/tqdm.py,sha256=ny5RIg2OTkWQ7gdaXfYaoIgR0Xn2_hNGB6tUpO2Unns,16137
  ultralytics/utils/triton.py,sha256=fbMfTAUyoGiyslWtySzLZw53XmZJa7rF31CYFot0Wjs,5422
  ultralytics/utils/tuner.py,sha256=9D4dSIvwwxcNSJcH2QJ92qiIVi9zu-1L7_PBZ8okDyE,6816
@@ -275,9 +275,9 @@ ultralytics/utils/callbacks/tensorboard.py,sha256=_4nfGK1dDLn6ijpvphBDhc-AS8qhS3
  ultralytics/utils/callbacks/wb.py,sha256=ngQO8EJ1kxJDF1YajScVtzBbm26jGuejA0uWeOyvf5A,7685
  ultralytics/utils/export/__init__.py,sha256=jQtf716PP0jt7bMoY9FkqmjG26KbvDzuR84jGhaBi2U,9901
  ultralytics/utils/export/imx.py,sha256=Jl5nuNxqaP_bY5yrV2NypmoJSrexHE71TxR72SDdjcg,11394
- ultralytics-8.3.202.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
- ultralytics-8.3.202.dist-info/METADATA,sha256=3f7-8Xx0HyQtSA5Ao-KRYBmcCLcefUj_KUyJIgslqYo,37667
- ultralytics-8.3.202.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- ultralytics-8.3.202.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
- ultralytics-8.3.202.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
- ultralytics-8.3.202.dist-info/RECORD,,
+ ultralytics-8.3.203.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ ultralytics-8.3.203.dist-info/METADATA,sha256=v5huEVqy-9MpoIFTqlwfsMeiCt0pwrGqnXrQQ1KqgTU,37667
+ ultralytics-8.3.203.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ ultralytics-8.3.203.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ ultralytics-8.3.203.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ ultralytics-8.3.203.dist-info/RECORD,,