ultralytics 8.3.127__py3-none-any.whl → 8.3.128__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
tests/test_cuda.py CHANGED
@@ -71,8 +71,13 @@ def test_export_engine_matrix(task, dynamic, int8, half, batch):
 @pytest.mark.skipif(not DEVICES, reason="No CUDA devices available")
 def test_train():
     """Test model training on a minimal dataset using available CUDA devices."""
-    device = DEVICES if len(DEVICES) > 1 else DEVICES[0]
-    YOLO(MODEL).train(data="coco8.yaml", imgsz=64, epochs=1, device=device)  # requires imgsz>=64
+    import os
+
+    device = tuple(DEVICES) if len(DEVICES) > 1 else DEVICES[0]
+    results = YOLO(MODEL).train(data="coco8.yaml", imgsz=64, epochs=1, device=device)  # requires imgsz>=64
+    visible = eval(os.environ["CUDA_VISIBLE_DEVICES"])
+    assert visible == device, f"Passed GPUs '{device}', but used GPUs '{visible}'"
+    assert results is (None if len(DEVICES) > 1 else not None)  # DDP returns None, single-GPU returns metrics
 
 
 @pytest.mark.slow
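Note: the updated test passes a tuple of GPU indices to exercise Ultralytics' DDP path and then checks both `CUDA_VISIBLE_DEVICES` and the return value. A minimal sketch of the same training API outside the test suite (checkpoint name and GPU indices are illustrative):

```python
from ultralytics import YOLO

# Illustrative multi-GPU training call; with more than one device Ultralytics launches
# DDP workers and train() returns None in the parent process, while a single device
# returns the validation metrics object.
model = YOLO("yolo11n.pt")  # any detection checkpoint
results = model.train(data="coco8.yaml", imgsz=64, epochs=1, device=[0, 1])  # or device=0 for a single GPU
print(results)
```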
tests/test_python.py CHANGED
@@ -188,11 +188,11 @@ def test_track_stream():
     model.track(video_url, imgsz=160, tracker="bytetrack.yaml")
     model.track(video_url, imgsz=160, tracker="botsort.yaml", save_frames=True)  # test frame saving also
 
-    # Test Global Motion Compensation (GMC) methods
-    for gmc in "orb", "sift", "ecc":
+    # Test Global Motion Compensation (GMC) methods and ReID
+    for gmc, reidm in zip(["orb", "sift", "ecc"], ["auto", "auto", "yolo11n-cls.pt"]):
         default_args = YAML.load(ROOT / "cfg/trackers/botsort.yaml")
         custom_yaml = TMP / f"botsort-{gmc}.yaml"
-        YAML.save(custom_yaml, {**default_args, "gmc_method": gmc})
+        YAML.save(custom_yaml, {**default_args, "gmc_method": gmc, "with_reid": True, "model": reidm})
         model.track(video_url, imgsz=160, tracker=custom_yaml)
 
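Note: the updated test now also enables ReID (`with_reid`) through the tracker YAML. A sketch of building a custom BoT-SORT config the same way in user code (file names and the video source are illustrative):

```python
from ultralytics import YOLO
from ultralytics.utils import ROOT, YAML

# Illustrative: start from the packaged BoT-SORT defaults and switch on ReID.
cfg = YAML.load(ROOT / "cfg/trackers/botsort.yaml")
cfg.update({"gmc_method": "sift", "with_reid": True, "model": "auto"})  # "auto" reuses detector features
YAML.save("botsort-reid.yaml", cfg)

YOLO("yolo11n.pt").track("path/to/video.mp4", tracker="botsort-reid.yaml", imgsz=160)
```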
 
ultralytics/__init__.py CHANGED
@@ -1,6 +1,6 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
 
-__version__ = "8.3.127"
+__version__ = "8.3.128"
 
 import os
 
ultralytics/engine/exporter.py CHANGED
@@ -82,6 +82,7 @@ from ultralytics.utils import (
     ARM64,
     DEFAULT_CFG,
     IS_COLAB,
+    IS_JETSON,
     LINUX,
     LOGGER,
     MACOS,
@@ -682,6 +683,7 @@ class Exporter:
     @try_export
     def export_paddle(self, prefix=colorstr("PaddlePaddle:")):
         """YOLO Paddle export."""
+        assert not IS_JETSON, "Jetson Paddle exports not supported yet"
         check_requirements(("paddlepaddle-gpu" if torch.cuda.is_available() else "paddlepaddle>=3.0.0", "x2paddle"))
         import x2paddle  # noqa
         from x2paddle.convert import pytorch2paddle  # noqa
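Note: the new assertion makes Paddle export fail fast on Jetson devices. A sketch of how that could surface to a caller (checkpoint name illustrative; on non-Jetson Linux/macOS the export proceeds as before):

```python
from ultralytics import YOLO

# Illustrative: on a Jetson device the Paddle exporter now raises immediately
# instead of failing partway through the conversion.
try:
    YOLO("yolo11n.pt").export(format="paddle")
except AssertionError as err:
    print(f"Paddle export unavailable here: {err}")
```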
ultralytics/engine/trainer.py CHANGED
@@ -105,7 +105,8 @@ class BaseTrainer:
         self.args = get_cfg(cfg, overrides)
         self.check_resume(overrides)
         self.device = select_device(self.args.device, self.args.batch)
-        self.args.device = str(self.device)  # ensure -1 is updated to selected CUDA device
+        # update "-1" devices so post-training val does not repeat search
+        self.args.device = os.getenv("CUDA_VISIBLE_DEVICES") if "cuda" in str(self.device) else str(self.device)
         self.validator = None
         self.metrics = None
         self.plots = {}
ultralytics/models/yolo/detect/predict.py CHANGED
@@ -82,11 +82,9 @@ class DetectionPredictor(BasePredictor):
 
     def get_obj_feats(self, feat_maps, idxs):
         """Extract object features from the feature maps."""
-        from math import gcd
-
         import torch
 
-        s = gcd(*[x.shape[1] for x in feat_maps])  # find smallest vector length
+        s = min([x.shape[1] for x in feat_maps])  # find smallest vector length
         obj_feats = torch.cat(
             [x.permute(0, 2, 3, 1).reshape(x.shape[0], -1, s, x.shape[1] // s).mean(dim=-1) for x in feat_maps], dim=1
         )  # mean reduce all vectors to same length
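Note: the reduction now uses the smallest channel width across scales instead of their GCD, so every feature map is averaged down to the same `s`-dimensional vectors. A self-contained sketch of the same operation on dummy feature maps (shapes are illustrative, and assume each channel count is a multiple of `s`, as 256/512/1024 are):

```python
import torch

# Dummy multi-scale feature maps shaped (batch, channels, height, width).
feat_maps = [torch.randn(1, 256, 80, 80), torch.randn(1, 512, 40, 40), torch.randn(1, 1024, 20, 20)]

s = min(x.shape[1] for x in feat_maps)  # smallest channel width (256 here)
obj_feats = torch.cat(
    [x.permute(0, 2, 3, 1).reshape(x.shape[0], -1, s, x.shape[1] // s).mean(dim=-1) for x in feat_maps], dim=1
)  # every spatial location becomes one s-dim vector, regardless of its original width
print(obj_feats.shape)  # torch.Size([1, 8400, 256]) for these shapes
```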
ultralytics/nn/autobackend.py CHANGED
@@ -290,8 +290,8 @@ class AutoBackend(nn.Module):
         elif engine:
             LOGGER.info(f"Loading {w} for TensorRT inference...")
 
-            if IS_JETSON and check_version(PYTHON_VERSION, "<=3.8.0"):
-                # fix error: `np.bool` was a deprecated alias for the builtin `bool` for JetPack 4 with Python <= 3.8.0
+            if IS_JETSON and check_version(PYTHON_VERSION, "<=3.8.10"):
+                # fix error: `np.bool` was a deprecated alias for the builtin `bool` for JetPack 4 and JetPack 5 with Python <= 3.8.10
                 check_requirements("numpy==1.23.5")
 
             try:  # https://developer.nvidia.com/nvidia-tensorrt-download
ultralytics/solutions/similarity_search.py CHANGED
@@ -29,16 +29,16 @@ class VisualAISearch(BaseSolution):
     def __init__(self, **kwargs):
         """Initializes the VisualAISearch class with the FAISS index file and CLIP model."""
         super().__init__(**kwargs)
-        check_requirements(["open-clip-torch", "faiss-cpu"])
+        check_requirements(["git+https://github.com/ultralytics/CLIP.git", "faiss-cpu"])
+        import clip
         import faiss
-        import open_clip
 
         self.faiss = faiss
-        self.open_clip = open_clip
+        self.clip = clip
 
         self.faiss_index = "faiss.index"
         self.data_path_npy = "paths.npy"
-        self.model_name = "ViT-B-32-quickgelu"
+        self.model_name = "ViT-B/32"
         self.data_dir = Path(self.CFG["data"])
         self.device = select_device(self.CFG["device"])
 
@@ -51,11 +51,7 @@ class VisualAISearch(BaseSolution):
             safe_download(url=f"{ASSETS_URL}/images.zip", unzip=True, retry=3)
             self.data_dir = Path("images")
 
-        self.clip_model, _, self.preprocess = self.open_clip.create_model_and_transforms(
-            self.model_name, pretrained="openai"
-        )
-        self.clip_model = self.clip_model.to(self.device).eval()
-        self.tokenizer = self.open_clip.get_tokenizer(self.model_name)
+        self.model, self.preprocess = clip.load(self.model_name, device=self.device)
 
         self.index = None
         self.image_paths = []
@@ -67,13 +63,13 @@ class VisualAISearch(BaseSolution):
         image = Image.open(path)
         tensor = self.preprocess(image).unsqueeze(0).to(self.device)
         with torch.no_grad():
-            return self.clip_model.encode_image(tensor).cpu().numpy()
+            return self.model.encode_image(tensor).cpu().numpy()
 
     def extract_text_feature(self, text):
         """Extract CLIP text embedding."""
-        tokens = self.tokenizer([text]).to(self.device)
+        tokens = self.clip.tokenize([text]).to(self.device)
         with torch.no_grad():
-            return self.clip_model.encode_text(tokens).cpu().numpy()
+            return self.model.encode_text(tokens).cpu().numpy()
 
     def load_or_build_index(self):
         """Loads FAISS index or builds a new one from image features."""
ultralytics/solutions/templates/similarity-search.html ADDED
@@ -0,0 +1,160 @@
+<!-- Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license -->
+
+<!--Similarity search webpage-->
+<!doctype html>
+<html lang="en">
+  <head>
+    <meta charset="UTF-8" />
+    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
+    <title>Semantic Image Search</title>
+    <link
+      href="https://fonts.googleapis.com/css2?family=Inter:wght@400;600&display=swap"
+      rel="stylesheet"
+    />
+    <style>
+      body {
+        background: linear-gradient(135deg, #f0f4ff, #f9fbff);
+        font-family: "Inter", sans-serif;
+        color: #111e68;
+        padding: 2rem;
+        margin: 0;
+        min-height: 100vh;
+      }
+
+      h1 {
+        text-align: center;
+        margin-bottom: 2rem;
+        font-size: 2.5rem;
+        font-weight: 600;
+      }
+
+      form {
+        display: flex;
+        flex-wrap: wrap;
+        justify-content: center;
+        align-items: center;
+        gap: 1rem;
+        margin-bottom: 3rem;
+        animation: fadeIn 1s ease-in-out;
+      }
+
+      input[type="text"] {
+        width: 300px;
+        padding: 0.75rem 1rem;
+        font-size: 1rem;
+        border-radius: 10px;
+        border: 1px solid #ccc;
+        box-shadow: 0 2px 6px rgba(0, 0, 0, 0.05);
+        transition: box-shadow 0.3s ease;
+      }
+
+      input[type="text"]:focus {
+        outline: none;
+        box-shadow: 0 0 0 3px rgba(17, 30, 104, 0.2);
+      }
+
+      button {
+        background-color: #111e68;
+        color: white;
+        font-weight: 600;
+        font-size: 1rem;
+        padding: 0.75rem 1.5rem;
+        border-radius: 10px;
+        border: none;
+        cursor: pointer;
+        transition:
+          background-color 0.3s ease,
+          transform 0.2s ease;
+      }
+
+      button:hover {
+        background-color: #1f2e9f;
+        transform: translateY(-2px);
+      }
+
+      .grid {
+        display: grid;
+        grid-template-columns: repeat(auto-fill, minmax(260px, 1fr));
+        gap: 1.5rem;
+        max-width: 1600px;
+        margin: auto;
+        animation: fadeInUp 1s ease-in-out;
+      }
+
+      .card {
+        background: white;
+        border-radius: 16px;
+        overflow: hidden;
+        box-shadow: 0 6px 14px rgba(0, 0, 0, 0.08);
+        transition:
+          transform 0.3s ease,
+          box-shadow 0.3s ease;
+      }
+
+      .card:hover {
+        transform: translateY(-6px);
+        box-shadow: 0 10px 20px rgba(0, 0, 0, 0.1);
+      }
+
+      .card img {
+        width: 100%;
+        height: 100%;
+        object-fit: cover;
+        display: block;
+      }
+
+      @keyframes fadeIn {
+        0% {
+          opacity: 0;
+          transform: scale(0.95);
+        }
+        100% {
+          opacity: 1;
+          transform: scale(1);
+        }
+      }
+
+      @keyframes fadeInUp {
+        0% {
+          opacity: 0;
+          transform: translateY(20px);
+        }
+        100% {
+          opacity: 1;
+          transform: translateY(0);
+        }
+      }
+    </style>
+  </head>
+  <body>
+    <div style="text-align: center; margin-bottom: 1rem">
+      <img
+        src="https://raw.githubusercontent.com/ultralytics/assets/main/logo/favicon.png"
+        alt="Ultralytics Logo"
+        style="height: 40px"
+      />
+    </div>
+    <h1>Semantic Image Search with AI</h1>
+
+    <!-- Search box -->
+    <form method="POST">
+      <input
+        type="text"
+        name="query"
+        placeholder="Describe the scene (e.g., man walking)"
+        value="{{ request.form['query'] }}"
+        required
+      />
+      <button type="submit">Search</button>
+    </form>

+    <!-- Search results grid -->
+    <div class="grid">
+      {% for img in results %}
+      <div class="card">
+        <img src="{{ url_for('static', filename=img) }}" alt="Result Image" />
+      </div>
+      {% endfor %}
+    </div>
+  </body>
+</html>
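Note: the new template is rendered with a Flask/Jinja-style context: it reads `request.form['query']` and expects `results` to be image paths resolvable via `url_for('static', ...)`. A hypothetical minimal app showing that contract (the packaged solution's actual server code is not part of this diff; the layout and helper below are assumptions for illustration only):

```python
from flask import Flask, render_template, request

app = Flask(__name__, template_folder="templates", static_folder="static")  # illustrative layout


def search_images(query):
    """Hypothetical stand-in: a real app would query VisualAISearch and return
    image paths relative to the static folder, e.g. ["images/result1.jpg"]."""
    return []


@app.route("/", methods=["GET", "POST"])
def index():
    results = search_images(request.form["query"]) if request.method == "POST" else []
    return render_template("similarity-search.html", results=results)


if __name__ == "__main__":
    app.run(debug=True)
```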
ultralytics/trackers/bot_sort.py CHANGED
@@ -255,6 +255,6 @@ class ReID:
     def __call__(self, img, dets):
         """Extract embeddings for detected objects."""
         feats = self.model([save_one_box(det, img, save=False) for det in xywh2xyxy(torch.from_numpy(dets[:, :4]))])
-        if feats.shape[0] != dets.shape[0] and feats[0].shape[0] == dets.shape[0]:
+        if len(feats) != dets.shape[0] and feats[0].shape[0] == dets.shape[0]:
             feats = feats[0]  # batched prediction with non-PyTorch backend
         return [f.cpu().numpy() for f in feats]
ultralytics/trackers/track.py CHANGED
@@ -44,6 +44,8 @@ def on_predict_start(predictor: object, persist: bool = False) -> None:
     if cfg.tracker_type not in {"bytetrack", "botsort"}:
         raise AssertionError(f"Only 'bytetrack' and 'botsort' are supported for now, but got '{cfg.tracker_type}'")
 
+    predictor._feats = None  # reset in case used earlier
+    predictor.save_feats = False
     if cfg.tracker_type == "botsort" and cfg.with_reid and cfg.model == "auto":
         from ultralytics.nn.modules.head import Detect
 
@@ -55,7 +57,6 @@ def on_predict_start(predictor: object, persist: bool = False) -> None:
             cfg.model = "yolo11n-cls.pt"
         else:
             predictor.save_feats = True
-            predictor._feats = None
 
             # Register hook to extract input of Detect layer
             def pre_hook(module, input):
ultralytics/utils/benchmarks.py CHANGED
@@ -40,7 +40,7 @@ import torch.cuda
 from ultralytics import YOLO, YOLOWorld
 from ultralytics.cfg import TASK2DATA, TASK2METRIC
 from ultralytics.engine.exporter import export_formats
-from ultralytics.utils import ARM64, ASSETS, LINUX, LOGGER, MACOS, TQDM, WEIGHTS_DIR, YAML
+from ultralytics.utils import ARM64, ASSETS, IS_JETSON, LINUX, LOGGER, MACOS, TQDM, WEIGHTS_DIR, YAML
 from ultralytics.utils.checks import IS_PYTHON_3_13, check_imgsz, check_requirements, check_yolo, is_rockchip
 from ultralytics.utils.downloads import safe_download
 from ultralytics.utils.files import file_size
@@ -126,7 +126,7 @@ def benchmark(
                 assert not isinstance(model, YOLOWorld), "YOLOWorldv2 Paddle exports not supported yet"
                 assert model.task != "obb", "Paddle OBB bug https://github.com/PaddlePaddle/Paddle/issues/72024"
                 assert not is_end2end, "End-to-end models not supported by PaddlePaddle yet"
-                assert LINUX or MACOS, "Windows Paddle exports not supported yet"
+                assert (LINUX and not IS_JETSON) or MACOS, "Windows and Jetson Paddle exports not supported yet"
             if i == 12:  # MNN
                 assert not isinstance(model, YOLOWorld), "YOLOWorldv2 MNN exports not supported yet"
             if i == 13:  # NCNN
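Note: these per-format assertions run inside the benchmark helper and mark unsupported combinations (now including Paddle on Jetson) rather than attempting them. A sketch of invoking the helper directly (arguments illustrative):

```python
from ultralytics.utils.benchmarks import benchmark

# Illustrative: benchmark a small checkpoint across export formats on CPU; formats whose
# assertions fail (e.g. Paddle on Windows or Jetson) are reported as unsupported in the results table.
benchmark(model="yolo11n.pt", imgsz=160, device="cpu")
```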
ultralytics-8.3.127.dist-info/METADATA → ultralytics-8.3.128.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ultralytics
-Version: 8.3.127
+Version: 8.3.128
 Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>
ultralytics-8.3.127.dist-info/RECORD → ultralytics-8.3.128.dist-info/RECORD RENAMED
@@ -1,13 +1,13 @@
 tests/__init__.py,sha256=xnMhv3O_DF1YrW4zk__ZywQzAaoTDjPKPoiI1Ktss1w,670
 tests/conftest.py,sha256=rsIAipRKfrVNoTaJ1LdpYue8AbcJ_fr3d3WIlM_6uXY,2982
 tests/test_cli.py,sha256=PtMFl5Lp_6ygBbYDJ1ndofz2k7ZYupMPEAiZw6aZVm8,5450
-tests/test_cuda.py,sha256=vMjegc23QlEzMdpzav2JEjXR1n8W-lYZ-KLGiLiwLok,6167
+tests/test_cuda.py,sha256=7HKiXWQM4hUdouksEB7DJILos0gb6St7fIGqx6YMkLQ,6448
 tests/test_engine.py,sha256=aGqZ8P7QO5C_nOa1b4FOyk92Ysdk5WiP-ST310Vyxys,4962
 tests/test_exports.py,sha256=dhZn86LdbapW15RthQF870LGxDjC1MUZhlGdBgPmgIQ,9716
 tests/test_integrations.py,sha256=dQteeRsRVuT_p5-T88-7jqT65Zm9iAXkyKg-KQ1_TQ8,6341
-tests/test_python.py,sha256=hkOJc0Ejin3Bywyw0BT4pPex5hwwfbmw0K5ChRtvdvw,25398
+tests/test_python.py,sha256=m3tV3atrc3DvXZ5S-_C1ief_pDo4KlLgudjc7rq26l0,25492
 tests/test_solutions.py,sha256=IFlqyOUCvGbLe_YZqWmNCe_afg4as0p-SfAv3j7VURI,6205
-ultralytics/__init__.py,sha256=rW2L-G5wnwjbBDeXcA2NLIjdA291K6epdl33-rsgZak,730
+ultralytics/__init__.py,sha256=eYHrIAy7F9bwg7pfP06EyjopprNxJRb4oqv7VuSEe8w,730
 ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
 ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
 ultralytics/cfg/__init__.py,sha256=We3ti0mvUQrGRmUPcufDGboW0YAO3nSRYuoWxGagk3M,39462
@@ -117,11 +117,11 @@ ultralytics/data/scripts/get_coco.sh,sha256=UuJpJeo3qQpTHVINeOpmP0NYmg8PhEFE3A8J
 ultralytics/data/scripts/get_coco128.sh,sha256=qmRQl_hOKrsdHrTrnyQuFIH01oDz3lfaz138OgGfLt8,650
 ultralytics/data/scripts/get_imagenet.sh,sha256=hr42H16bM47iT27rgS7MpEo-GeOZAYUQXgr0B2cwn48,1705
 ultralytics/engine/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
-ultralytics/engine/exporter.py,sha256=aaZ_-np1q0klWtDXp6CxVjyiZ0DDXx-8Pqg4jZSByuE,70246
+ultralytics/engine/exporter.py,sha256=XDJboUBDGDrFsppwTVujoGilf5vTkO14KYMhMu5YZQ0,70333
 ultralytics/engine/model.py,sha256=37qGh6aqqPTUyMfpsvBQMaZ1Av7eJDe6mfRl9GvlfKg,52860
 ultralytics/engine/predictor.py,sha256=YJ5l-0qIpr6JAJxowswtZ0IqmXBqVTvAA9vR40v0sCM,21752
 ultralytics/engine/results.py,sha256=-JPBn_YMyZv6HhdlyhjRIZCcMf41LTyWID7JrEP64rc,79632
-ultralytics/engine/trainer.py,sha256=sQCtjCI7_qOvXp4z-OPIQB1Nnqgeoi8YAIJAiCs_OOY,38951
+ultralytics/engine/trainer.py,sha256=aj41kXVeNfJOlMhSNrW_XwElQ5D0jtuX6ezJC2w8xa8,39046
 ultralytics/engine/tuner.py,sha256=zEW1UpLlZ6N4xbvS7MxICkshRlaFgLNfuADA0VfRpao,12629
 ultralytics/engine/validator.py,sha256=jfV81wuFDgrVVXEcPzgOpxAPrAZn-1LgpKwu9l_1-ts,17050
 ultralytics/hub/__init__.py,sha256=wDtAUKdfqob95tfFHgDJFXcsNSDSdoIQkJTm-CfIUTI,6616
@@ -168,7 +168,7 @@ ultralytics/models/yolo/classify/predict.py,sha256=JV9szginTQ9Lpob0FozhKMiEIu1vV
 ultralytics/models/yolo/classify/train.py,sha256=rv2CJv9fzvtHf2q4l5g0RsjplWKeLpz637kKqjtrLNY,9737
 ultralytics/models/yolo/classify/val.py,sha256=xk-YwSQdl_oqyCBV0OOAOcXFL6CchebFOc36AkRSyjE,9992
 ultralytics/models/yolo/detect/__init__.py,sha256=GIRsLYR-kT4JJx7lh4ZZAFGBZj0aebokuU0A7JbjDVA,257
-ultralytics/models/yolo/detect/predict.py,sha256=n1-WmzkvW3dHglI7XrxDr4i0nZ236h6Wh37TAWXpFfo,5341
+ultralytics/models/yolo/detect/predict.py,sha256=DOjhYCHPFPPAwZLWWmNt0d7lGka8GFeriM0OA9PTEGU,5310
 ultralytics/models/yolo/detect/train.py,sha256=YOEmUZkfJBq6hNbB_P10k-uy4_2fUgdPfVWzO4y8Egs,9538
 ultralytics/models/yolo/detect/val.py,sha256=7AB_wZi7aQ9_V1pZQSWk5qiJYS34fuO3P5aX7_3eeFE,18471
 ultralytics/models/yolo/obb/__init__.py,sha256=tQmpG8wVHsajWkZdmD6cjGohJ4ki64iSXQT8JY_dydo,221
@@ -192,7 +192,7 @@ ultralytics/models/yolo/yoloe/train.py,sha256=St3zw_XWRol9pODWU4lvKlJnWYr1lmWQNu
 ultralytics/models/yolo/yoloe/train_seg.py,sha256=l0SOMQQd0Y_EBBHhTNekgrQsftqhYyK4oWTdCg1dLrE,4633
 ultralytics/models/yolo/yoloe/val.py,sha256=oA8cVT3pBXF6aPZy7ITq0mDcktRuIgks8tTtqMRISyY,8431
 ultralytics/nn/__init__.py,sha256=rjociYD9lo_K-d-1s6TbdWklPLjTcEHk7OIlRDJstIE,615
-ultralytics/nn/autobackend.py,sha256=03DGRLuVDJ8T2zWFqmAX0eOhy42bhIRS7KdpSII8bEE,39309
+ultralytics/nn/autobackend.py,sha256=9uuLVg5_1irhw2OYahkVoGWmcyB61jVFBVZEiN8GQ1A,39325
 ultralytics/nn/tasks.py,sha256=0rnM6Z01BUnRtUwCkTwVsPxZ_D3A5tNbBjd7aEoxxns,62943
 ultralytics/nn/text_model.py,sha256=8_7SRejKZA4Pi-ha0gjcWrQDDCDMBhtwlg8pPMWgjDE,13145
 ultralytics/nn/modules/__init__.py,sha256=dXLtIk9rt944WfsTdpgEdWOg3HQEHdwQztuZ6WNJygs,3144
@@ -216,17 +216,18 @@ ultralytics/solutions/parking_management.py,sha256=BV-2lpSfgmK7fib3DnPSZ5rtLdy11
 ultralytics/solutions/queue_management.py,sha256=p1-cuI_rs4ygtlBryXjE65NYG2bnZXhp3ylggFnWcRs,4344
 ultralytics/solutions/region_counter.py,sha256=Zn35YRXNzhBk27D9MLOHBYe2L1o6H2ey3mEwCXofB_E,5418
 ultralytics/solutions/security_alarm.py,sha256=cmUWvz7U9IAxlOr-QCIU_j95lc2c8eUx9wI04t1vDFU,6251
-ultralytics/solutions/similarity_search.py,sha256=joejjaw0FWfZKnkNJQhT9l7Hz9jkquLu8JY7B6Iy93g,7535
+ultralytics/solutions/similarity_search.py,sha256=WTYmHNHfFrRiJ6mrZhJvGPsjt3szQUiM6VRpw2eBRjA,7332
 ultralytics/solutions/solutions.py,sha256=aXU5p6zv8UPyaC8v51tsE9L_KzmnRCP4M9PP6pAYMXQ,32715
 ultralytics/solutions/speed_estimation.py,sha256=r7S5nGIx8PTV-zC4zCI36lQD2DVy5cen5cTXItfQIHo,5318
 ultralytics/solutions/streamlit_inference.py,sha256=M0ppTFInqSPrdytZBLH8x-XoA7zFc7PaRQ51wHG9ppU,9846
 ultralytics/solutions/trackzone.py,sha256=mfklnZcVRqI3bbhPiHF2iSoV6INcd10wwwGP4tlK7L0,3854
 ultralytics/solutions/vision_eye.py,sha256=7YrMqZkR28LLNHWxX3Ye78GvPdXXuouQAmgMdGwRLQ4,2953
+ultralytics/solutions/templates/similarity-search.html,sha256=DPoAO-1H-KXNt_T8mGtSCsYUEi_5Nrx01p0cZfX-E8Q,3790
 ultralytics/trackers/__init__.py,sha256=Zlu_Ig5osn7hqch_g5Be_e4pwZUkeeTQiesJCi0pFGI,255
 ultralytics/trackers/basetrack.py,sha256=LYvWB5d7Woyrz_RlxaopjV07RQKH3sff_lZJfMcMxcA,4450
-ultralytics/trackers/bot_sort.py,sha256=rpaj7X8COT0Vi5GFR9z-CGSBgJ7gTfFx2wTSZFTnhco,11466
+ultralytics/trackers/bot_sort.py,sha256=fAMV6PJE19jXe-6u524bpcz7x3Ssauk3b3wKXUYpvoY,11462
 ultralytics/trackers/byte_tracker.py,sha256=D7JQ_6V8OUMQryxTrAr010UXMSaboQnI7T1xppzHXYg,20921
-ultralytics/trackers/track.py,sha256=ghFyAaXg1fp7QPX_SDWkH05cx07xnAlhUypkT3djXD0,4825
+ultralytics/trackers/track.py,sha256=hTh-qRZvCrnmo8TsfMQK8sp1F7qeUi97jgtXX-xhX3I,4880
 ultralytics/trackers/utils/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
 ultralytics/trackers/utils/gmc.py,sha256=dz3I5LbIv7h1__Xg7rGHecQFE32VFTe54tUnxb8F0Z8,14466
 ultralytics/trackers/utils/kalman_filter.py,sha256=A0CqOnnaKH6kr0XwuHzyHmIU6aJAjJYxF9jVlNBKZHo,21326
@@ -234,7 +235,7 @@ ultralytics/trackers/utils/matching.py,sha256=7eIufSdeN7cXuFMjvcfvz0Ldq84m4YKZl5
 ultralytics/utils/__init__.py,sha256=YSBOQcgak2v6l03EHPjkpzH-ZtjVXrg2_4o0BF1cqDQ,52807
 ultralytics/utils/autobatch.py,sha256=kg05q2qKg74y_Uq2vvr01i3KhLfpVR7sT0IXBt3_kyI,4921
 ultralytics/utils/autodevice.py,sha256=OrLSk34UpW0I5ndxnkQEIWBxL--CvAON_W9Qw51zOGA,7233
-ultralytics/utils/benchmarks.py,sha256=1Y6R1DxdSOzeHRsKKgMOab_bdtEWF9z32HOU2hqgzss,30172
+ultralytics/utils/benchmarks.py,sha256=lDNNnLeLUzmqKrqrqlCOiau-q7A-gcLooZP2dbxCu-U,30214
 ultralytics/utils/checks.py,sha256=Z87AuJ3C5JcTVYdhAn31BFErmF48bRyMc4_WZ9ku5-E,32711
 ultralytics/utils/dist.py,sha256=aytW0JEkcA5ZTZucV92ot7Bn-apiej8aLk3QNWicjAc,4103
 ultralytics/utils/downloads.py,sha256=Rn8xDwn2bzgBqiYz3Xn0rm3MWjk4T-QUd2Ajlu1EpQ4,22312
@@ -262,9 +263,9 @@ ultralytics/utils/callbacks/neptune.py,sha256=JaI95Cj2kIjUhlEEOiDN0-Drc-fDelLhNI
 ultralytics/utils/callbacks/raytune.py,sha256=A8amUGpux7dYES-L1iSeMoMXBySGWCD1aUqT7vcG-pU,1284
 ultralytics/utils/callbacks/tensorboard.py,sha256=jgYnym3cUQFAgN1GzTyO7l3jINtfAh8zhrllDvnLuVQ,5339
 ultralytics/utils/callbacks/wb.py,sha256=iDRFXI4IIDm8R5OI89DMTmjs8aHLo1HRCLkOFKdaMG4,7507
-ultralytics-8.3.127.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
-ultralytics-8.3.127.dist-info/METADATA,sha256=bWJ0fJoFESEzhGqAO3ox4uQ5b0EnneDRxfHn3D1efDs,37223
-ultralytics-8.3.127.dist-info/WHEEL,sha256=GHB6lJx2juba1wDgXDNlMTyM13ckjBMKf-OnwgKOCtA,91
-ultralytics-8.3.127.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
-ultralytics-8.3.127.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
-ultralytics-8.3.127.dist-info/RECORD,,
+ultralytics-8.3.128.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ultralytics-8.3.128.dist-info/METADATA,sha256=-IGLMF2lUP_NBqvLF8v0L8mTJX9DuFEiU5yxPFcXlhY,37223
+ultralytics-8.3.128.dist-info/WHEEL,sha256=0CuiUZ_p9E4cD6NyLD6UG80LBXYyiSYZOKDm5lp32xk,91
+ultralytics-8.3.128.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ultralytics-8.3.128.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ultralytics-8.3.128.dist-info/RECORD,,
ultralytics-8.3.127.dist-info/WHEEL → ultralytics-8.3.128.dist-info/WHEEL RENAMED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (80.3.0)
+Generator: setuptools (80.3.1)
 Root-Is-Purelib: true
 Tag: py3-none-any
 